extern crate cargo;
+extern crate clap;
extern crate env_logger;
#[macro_use]
extern crate failure;
extern crate git2_curl;
-extern crate toml;
extern crate log;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
-extern crate clap;
+extern crate toml;
use std::env;
use std::fs;
use std::collections::BTreeSet;
use cargo::core::shell::Shell;
-use cargo::util::{self, CliResult, lev_distance, Config, CargoResult};
+use cargo::util::{self, lev_distance, CargoResult, CliResult, Config};
use cargo::util::{CliError, ProcessError};
mod cli;
mod command_prelude;
mod commands;
-
fn main() {
env_logger::init();
match config.get_string(&alias_name) {
Ok(value) => {
if let Some(record) = value {
- let alias_commands = record.val
+ let alias_commands = record
+ .val
.split_whitespace()
.map(|s| s.to_string())
.collect();
Err(_) => {
let value = config.get_list(&alias_name)?;
if let Some(record) = value {
- let alias_commands: Vec<String> = record.val
- .iter()
- .map(|s| s.0.to_string())
- .collect();
+ let alias_commands: Vec<String> =
+ record.val.iter().map(|s| s.0.to_string()).collect();
result = Ok(Some(alias_commands));
}
}
}
if is_executable(entry.path()) {
let end = filename.len() - suffix.len();
- commands.insert(
- (filename[prefix.len()..end].to_string(),
- Some(path.display().to_string()))
- );
+ commands.insert((
+ filename[prefix.len()..end].to_string(),
+ Some(path.display().to_string()),
+ ));
}
}
}
commands
}
-
fn find_closest(config: &Config, cmd: &str) -> Option<String> {
let cmds = list_commands(config);
// Only consider candidates with a lev_distance of 3 or less so we don't
Some(command) => command,
None => {
let err = match find_closest(config, cmd) {
- Some(closest) => {
- format_err!("no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
- cmd,
- closest)
- }
+ Some(closest) => format_err!(
+ "no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
+ cmd,
+ closest
+ ),
None => format_err!("no such subcommand: `{}`", cmd),
};
- return Err(CliError::new(err, 101))
+ return Err(CliError::new(err, 101));
}
};
let err = match util::process(&command)
.env(cargo::CARGO_ENV, cargo_exe)
.args(&args[1..])
- .exec_replace() {
+ .exec_replace()
+ {
Ok(()) => return Ok(()),
Err(e) => e,
};
}
#[cfg(windows)]
fn is_executable<P: AsRef<Path>>(path: P) -> bool {
- fs::metadata(path).map(|metadata| metadata.is_file()).unwrap_or(false)
+ fs::metadata(path)
+ .map(|metadata| metadata.is_file())
+ .unwrap_or(false)
}
fn search_directories(config: &Config) -> Vec<PathBuf> {
use clap::{AppSettings, Arg, ArgMatches};
-use cargo::{self, Config, CliResult};
+use cargo::{self, CliResult, Config};
use super::list_commands;
use super::commands;
let version = cargo::version();
println!("{}", version);
if is_verbose {
- println!("release: {}.{}.{}",
- version.major,
- version.minor,
- version.patch);
+ println!(
+ "release: {}.{}.{}",
+ version.major, version.minor, version.patch
+ );
if let Some(ref cfg) = version.cfg_info {
if let Some(ref ci) = cfg.commit_info {
println!("commit-hash: {}", ci.commit_hash);
return Ok(());
}
- if args.subcommand_name().is_none() {
- }
+ if args.subcommand_name().is_none() {}
execute_subcommand(config, args)
}
fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
config.configure(
args.occurrences_of("verbose") as u32,
- if args.is_present("quiet") { Some(true) } else { None },
+ if args.is_present("quiet") {
+ Some(true)
+ } else {
+ None
+ },
&args.value_of("color").map(|s| s.to_string()),
args.is_present("frozen"),
args.is_present("locked"),
- &args.values_of_lossy("unstable-features").unwrap_or_default(),
+ &args.values_of_lossy("unstable-features")
+ .unwrap_or_default(),
)?;
let (cmd, args) = match args.subcommand() {
}
if let Some(mut alias) = super::aliased_command(config, cmd)? {
- alias.extend(args.values_of("").unwrap_or_default().map(|s| s.to_string()));
+ alias.extend(
+ args.values_of("")
+ .unwrap_or_default()
+ .map(|s| s.to_string()),
+ );
let args = cli()
.setting(AppSettings::NoBinaryName)
.get_matches_from_safe(alias)?;
super::execute_external_subcommand(config, cmd, &ext_args)
}
-
fn cli() -> App {
let app = App::new("cargo")
.settings(&[
AppSettings::AllowExternalSubcommands,
])
.about("")
- .template("\
+ .template(
+ "\
Rust's package manager
USAGE:
install Install a Rust binary
uninstall Uninstall a Rust binary
-See 'cargo help <command>' for more information on a specific command."
- )
- .arg(
- opt("version", "Print version info and exit")
- .short("V")
- )
- .arg(
- opt("list", "List installed commands")
+See 'cargo help <command>' for more information on a specific command.",
)
+ .arg(opt("version", "Print version info and exit").short("V"))
+ .arg(opt("list", "List installed commands"))
+ .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
.arg(
- opt("explain", "Run `rustc --explain CODE`")
- .value_name("CODE")
- )
- .arg(
- opt("verbose", "Use verbose output (-vv very verbose/build.rs output)")
- .short("v").multiple(true).global(true)
+ opt(
+ "verbose",
+ "Use verbose output (-vv very verbose/build.rs output)",
+ ).short("v")
+ .multiple(true)
+ .global(true),
)
.arg(
opt("quiet", "No output printed to stdout")
- .short("q").global(true)
+ .short("q")
+ .global(true),
)
.arg(
opt("color", "Coloring: auto, always, never")
- .value_name("WHEN").global(true)
- )
- .arg(
- opt("frozen", "Require Cargo.lock and cache are up to date")
- .global(true)
- )
- .arg(
- opt("locked", "Require Cargo.lock is up to date")
- .global(true)
+ .value_name("WHEN")
+ .global(true),
)
+ .arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
+ .arg(opt("locked", "Require Cargo.lock is up to date").global(true))
.arg(
- Arg::with_name("unstable-features").help("Unstable (nightly-only) flags to Cargo")
- .short("Z").value_name("FLAG").multiple(true).global(true)
+ Arg::with_name("unstable-features")
+ .help("Unstable (nightly-only) flags to Cargo")
+ .short("Z")
+ .value_name("FLAG")
+ .multiple(true)
+ .global(true),
)
- .subcommands(commands::builtin())
- ;
+ .subcommands(commands::builtin());
app
}
use clap::{self, SubCommand};
use cargo::CargoResult;
use cargo::core::Workspace;
-use cargo::ops::{CompileMode, CompileOptions, CompileFilter, Packages, MessageFormat,
- VersionControl, NewOptions};
+use cargo::ops::{CompileFilter, CompileMode, CompileOptions, MessageFormat, NewOptions, Packages,
+ VersionControl};
use cargo::util::important_paths::find_root_manifest_for_wd;
-pub use clap::{Arg, ArgMatches, AppSettings};
-pub use cargo::{Config, CliResult, CliError};
+pub use clap::{AppSettings, Arg, ArgMatches};
+pub use cargo::{CliError, CliResult, Config};
pub type App = clap::App<'static, 'static>;
fn _arg(self, arg: Arg<'static, 'static>) -> Self;
fn arg_package(self, package: &'static str, all: &'static str, exclude: &'static str) -> Self {
- self._arg(opt("package", package).short("p").value_name("SPEC").multiple(true))
- ._arg(opt("all", all))
+ self._arg(
+ opt("package", package)
+ .short("p")
+ .value_name("SPEC")
+ .multiple(true),
+ )._arg(opt("all", all))
._arg(opt("exclude", exclude).value_name("SPEC").multiple(true))
}
fn arg_jobs(self) -> Self {
self._arg(
opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
- .short("j").value_name("N")
+ .short("j")
+ .value_name("N"),
)
}
._arg(opt("all-targets", all))
}
- fn arg_targets_lib_bin(
- self,
- lib: &'static str,
- bin: &'static str,
- bins: &'static str,
- ) -> Self {
+ fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self {
self._arg(opt("lib", lib))
._arg(opt("bin", bin).value_name("NAME").multiple(true))
._arg(opt("bins", bins))
._arg(opt("examples", examples))
}
- fn arg_targets_bin_example(
- self,
- bin: &'static str,
- example: &'static str,
- ) -> Self {
+ fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
self._arg(opt("bin", bin).value_name("NAME").multiple(true))
._arg(opt("example", example).value_name("NAME").multiple(true))
}
fn arg_features(self) -> Self {
- self
- ._arg(
- opt("features", "Space-separated list of features to activate")
- .value_name("FEATURES")
- )
- ._arg(opt("all-features", "Activate all available features"))
- ._arg(opt("no-default-features", "Do not activate the `default` feature"))
+ self._arg(
+ opt("features", "Space-separated list of features to activate").value_name("FEATURES"),
+ )._arg(opt("all-features", "Activate all available features"))
+ ._arg(opt(
+ "no-default-features",
+ "Do not activate the `default` feature",
+ ))
}
fn arg_release(self, release: &'static str) -> Self {
opt("message-format", "Error format")
.value_name("FMT")
.case_insensitive(true)
- .possible_values(&["human", "json"]).default_value("human")
+ .possible_values(&["human", "json"])
+ .default_value("human"),
)
}
fn arg_new_opts(self) -> Self {
self._arg(
- opt("vcs", "\
-Initialize a new repository for the given version \
-control system (git, hg, pijul, or fossil) or do not \
-initialize any version control at all (none), overriding \
-a global configuration.")
- .value_name("VCS")
- .possible_values(&["git", "hg", "pijul", "fossil", "none"])
- )
- ._arg(opt("bin", "Use a binary (application) template [default]"))
+ opt(
+ "vcs",
+ "\
+ Initialize a new repository for the given version \
+ control system (git, hg, pijul, or fossil) or do not \
+ initialize any version control at all (none), overriding \
+ a global configuration.",
+ ).value_name("VCS")
+ .possible_values(&["git", "hg", "pijul", "fossil", "none"]),
+ )._arg(opt("bin", "Use a binary (application) template [default]"))
._arg(opt("lib", "Use a library template"))
._arg(
- opt("name", "Set the resulting package name, defaults to the directory name")
- .value_name("NAME")
+ opt(
+ "name",
+ "Set the resulting package name, defaults to the directory name",
+ ).value_name("NAME"),
)
}
fn arg_index(self) -> Self {
- self
- ._arg(
- opt("index", "Registry index to upload the package to")
- .value_name("INDEX")
- )
+ self._arg(opt("index", "Registry index to upload the package to").value_name("INDEX"))
._arg(
opt("host", "DEPRECATED, renamed to '--index'")
.value_name("HOST")
- .hidden(true)
+ .hidden(true),
)
}
}
}
pub fn subcommand(name: &'static str) -> App {
- SubCommand::with_name(name)
- .settings(&[
- AppSettings::UnifiedHelpMessage,
- AppSettings::DeriveDisplayOrder,
- AppSettings::DontCollapseArgsInUsage,
- ])
+ SubCommand::with_name(name).settings(&[
+ AppSettings::UnifiedHelpMessage,
+ AppSettings::DeriveDisplayOrder,
+ AppSettings::DontCollapseArgsInUsage,
+ ])
}
-
pub trait ArgMatchesExt {
fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
let arg = match self._value_of(name) {
None => None,
Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
- clap::Error::value_validation_auto(
- format!("could not parse `{}` as a number", arg)
- )
- })?)
+ clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg))
+ })?),
};
Ok(arg)
}
fn compile_options<'a>(
&self,
config: &'a Config,
- mode: CompileMode
+ mode: CompileMode,
) -> CargoResult<CompileOptions<'a>> {
let spec = Packages::from_flags(
self._is_present("all"),
spec,
mode,
release: self._is_present("release"),
- filter: CompileFilter::new(self._is_present("lib"),
- self._values_of("bin"), self._is_present("bins"),
- self._values_of("test"), self._is_present("tests"),
- self._values_of("example"), self._is_present("examples"),
- self._values_of("bench"), self._is_present("benches"),
- self._is_present("all-targets")),
+ filter: CompileFilter::new(
+ self._is_present("lib"),
+ self._values_of("bin"),
+ self._is_present("bins"),
+ self._values_of("test"),
+ self._is_present("tests"),
+ self._values_of("example"),
+ self._is_present("examples"),
+ self._values_of("bench"),
+ self._is_present("benches"),
+ self._is_present("all-targets"),
+ ),
message_format,
target_rustdoc_args: None,
target_rustc_args: None,
fn compile_options_for_single_package<'a>(
&self,
config: &'a Config,
- mode: CompileMode
+ mode: CompileMode,
) -> CargoResult<CompileOptions<'a>> {
let mut compile_opts = self.compile_options(config, mode)?;
compile_opts.spec = Packages::Packages(self._values_of("package"));
"none" => VersionControl::NoVcs,
vcs => panic!("Impossible vcs: {:?}", vcs),
});
- NewOptions::new(vcs,
- self._is_present("bin"),
- self._is_present("lib"),
- self._value_of("path").unwrap().to_string(),
- self._value_of("name").map(|s| s.to_string()))
+ NewOptions::new(
+ vcs,
+ self._is_present("bin"),
+ self._is_present("lib"),
+ self._value_of("path").unwrap().to_string(),
+ self._value_of("name").map(|s| s.to_string()),
+ )
}
fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
match self._value_of("registry") {
Some(registry) => {
if !config.cli_unstable().unstable_options {
- return Err(format_err!("registry option is an unstable feature and \
- requires -Zunstable-options to use.").into());
+ return Err(format_err!(
+ "registry option is an unstable feature and \
+ requires -Zunstable-options to use."
+ ).into());
}
Ok(Some(registry.to_string()))
}
config.shell().warn(&msg)?;
Some(host.to_string())
}
- None => self._value_of("index").map(|s| s.to_string())
+ None => self._value_of("index").map(|s| s.to_string()),
};
Ok(index)
}
}
fn _values_of(&self, name: &str) -> Vec<String> {
- self.values_of(name).unwrap_or_default()
+ self.values_of(name)
+ .unwrap_or_default()
.map(|s| s.to_string())
.collect()
}
}
pub fn values(args: &ArgMatches, name: &str) -> Vec<String> {
- args.values_of(name).unwrap_or_default()
+ args.values_of(name)
+ .unwrap_or_default()
.map(|s| s.to_string())
.collect()
}
.setting(AppSettings::TrailingVarArg)
.about("Execute all benchmarks of a local package")
.arg(
- Arg::with_name("BENCHNAME").help(
- "If specified, only run benches containing this string in their names"
- )
+ Arg::with_name("BENCHNAME")
+ .help("If specified, only run benches containing this string in their names"),
)
.arg(
- Arg::with_name("args").help(
- "Arguments for the bench binary"
- ).multiple(true).last(true)
+ Arg::with_name("args")
+ .help("Arguments for the bench binary")
+ .multiple(true)
+ .last(true),
)
-
.arg_targets_all(
"Benchmark only this package's library",
"Benchmark only the specified binary",
"Benchmark all benches",
"Benchmark all targets (default)",
)
-
- .arg(
- opt("no-run", "Compile, but don't run benchmarks")
- )
+ .arg(opt("no-run", "Compile, but don't run benchmarks"))
.arg_package(
"Package to run benchmarks for",
"Benchmark all packages in the workspace",
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
- .arg(
- opt("no-fail-fast", "Run all benchmarks regardless of failure")
- )
- .after_help("\
+ .arg(opt(
+ "no-fail-fast",
+ "Run all benchmarks regardless of failure",
+ ))
+ .after_help(
+ "\
All of the trailing arguments are passed to the benchmark binaries generated
for filtering benchmarks and generally providing options configuring how they
run.
not affect how many jobs are used when running the benchmarks.
Compilation can be customized with the `bench` profile in the manifest.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
};
let mut bench_args = vec![];
- bench_args.extend(args.value_of("BENCHNAME").into_iter().map(|s| s.to_string()));
- bench_args.extend(args.values_of("args").unwrap_or_default().map(|s| s.to_string()));
+ bench_args.extend(
+ args.value_of("BENCHNAME")
+ .into_iter()
+ .map(|s| s.to_string()),
+ );
+ bench_args.extend(
+ args.values_of("args")
+ .unwrap_or_default()
+ .map(|s| s.to_string()),
+ );
let err = ops::run_benches(&ws, &ops, &bench_args)?;
match err {
None => Ok(()),
- Some(err) => {
- Err(match err.exit.as_ref().and_then(|e| e.code()) {
- Some(i) => CliError::new(format_err!("bench failed"), i),
- None => CliError::new(err.into(), 101)
- })
- }
+ Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+ Some(i) => CliError::new(format_err!("bench failed"), i),
+ None => CliError::new(err.into(), 101),
+ }),
}
}
use cargo::ops::{self, CompileMode};
pub fn cli() -> App {
- subcommand("build").alias("b")
+ subcommand("build")
+ .alias("b")
.about("Compile a local package and all of its dependencies")
.arg_package(
"Package to build",
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be built. If it is not given, then the
current package is built. For more information on SPEC and its format, see the
Compilation can be configured via the use of profiles which are configured in
the manifest. The default profile for this command is `dev`, but passing
the --release flag will use the `release` profile instead.
-")
-
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
"Check all targets (lib and bin targets by default)",
)
.arg_release("Check artifacts in release mode, with optimizations")
- .arg(
- opt("profile", "Profile to build the selected target for")
- .value_name("PROFILE")
- )
+ .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
.arg_features()
.arg_target_triple("Check for the target triple")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be built. If it is not given, then the
current package is built. For more information on SPEC and its format, see the
The `--profile test` flag can be used to check unit tests with the
`#[cfg(test)]` attribute.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
Some("test") => true,
None => false,
Some(profile) => {
- let err = format_err!("unknown profile: `{}`, only `test` is \
- currently supported", profile);
+ let err = format_err!(
+ "unknown profile: `{}`, only `test` is \
+ currently supported",
+ profile
+ );
return Err(CliError::new(err, 101));
}
};
.about("Remove artifacts that cargo has generated in the past")
.arg(
opt("package", "Package to clean artifacts for")
- .short("p").value_name("SPEC").multiple(true)
+ .short("p")
+ .value_name("SPEC")
+ .multiple(true),
)
.arg_manifest_path()
.arg_target_triple("Target triple to clean output for (default all)")
.arg_release("Whether or not to clean release artifacts")
- .after_help("\
+ .after_help(
+ "\
If the --package argument is given, then SPEC is a package id specification
which indicates which package's artifacts should be cleaned out. If it is not
given, then all packages' artifacts are removed. For more information on SPEC
and its format, see the `cargo help pkgid` command.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
pub fn cli() -> App {
subcommand("doc")
.about("Build a package's documentation")
- .arg(
- opt("open", "Opens the docs in a browser after the operation")
- )
+ .arg(opt(
+ "open",
+ "Opens the docs in a browser after the operation",
+ ))
.arg_package(
"Package to document",
"Document all packages in the workspace",
"Exclude packages from the build",
)
- .arg(
- opt("no-deps", "Don't build documentation for dependencies")
- )
+ .arg(opt("no-deps", "Don't build documentation for dependencies"))
.arg_jobs()
.arg_targets_lib_bin(
"Document only this package's library",
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
By default the documentation for the local package and all dependencies is
built. The output is all placed in `target/doc` in rustdoc's usual format.
which indicates which package should be documented. If it is not given, then the
current package is documented. For more information on SPEC and its format, see
the `cargo help pkgid` command.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
- let mode = CompileMode::Doc { deps: !args.is_present("no-deps") };
+ let mode = CompileMode::Doc {
+ deps: !args.is_present("no-deps"),
+ };
let compile_opts = args.compile_options(config, mode)?;
let doc_opts = DocOptions {
open_result: args.is_present("open"),
subcommand("fetch")
.about("Fetch dependencies of a package from the network")
.arg_manifest_path()
- .after_help("\
+ .after_help(
+ "\
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
subcommand("generate-lockfile")
.about("Generate the lockfile for a project")
.arg_manifest_path()
- .after_help("\
+ .after_help(
+ "\
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
use command_prelude::*;
-use cargo::core::{GitReference, SourceId, Source};
+use cargo::core::{GitReference, Source, SourceId};
use cargo::sources::GitSource;
use cargo::util::ToUrl;
pub fn cli() -> App {
subcommand("git-checkout")
.about("Checkout a copy of a Git repository")
- .arg(Arg::with_name("url").long("url").value_name("URL").required(true))
- .arg(Arg::with_name("reference").long("reference").value_name("REF").required(true))
+ .arg(
+ Arg::with_name("url")
+ .long("url")
+ .value_name("URL")
+ .required(true),
+ )
+ .arg(
+ Arg::with_name("reference")
+ .long("reference")
+ .value_name("REF")
+ .required(true),
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let opts = args.new_options()?;
ops::init(&opts, config)?;
- config.shell().status("Created", format!("{} project", opts.kind))?;
+ config
+ .shell()
+ .status("Created", format!("{} project", opts.kind))?;
Ok(())
}
subcommand("install")
.about("Create a new cargo package in an existing directory")
.arg(Arg::with_name("crate").multiple(true))
-
.arg(
opt("version", "Specify a version to install from crates.io")
- .alias("vers").value_name("VERSION")
- )
- .arg(
- opt("git", "Git URL to install the specified crate from")
- .value_name("URL")
- )
- .arg(
- opt("branch", "Branch to use when installing from git")
- .value_name("BRANCH")
- )
- .arg(
- opt("tag", "Tag to use when installing from git")
- .value_name("TAG")
- )
- .arg(
- opt("rev", "Specific commit to use when installing from git")
- .value_name("SHA")
+ .alias("vers")
+ .value_name("VERSION"),
)
- .arg(
- opt("path", "Filesystem path to local crate to install")
- .value_name("PATH")
- )
-
- .arg(opt("list", "list all installed packages and their versions"))
-
+ .arg(opt("git", "Git URL to install the specified crate from").value_name("URL"))
+ .arg(opt("branch", "Branch to use when installing from git").value_name("BRANCH"))
+ .arg(opt("tag", "Tag to use when installing from git").value_name("TAG"))
+ .arg(opt("rev", "Specific commit to use when installing from git").value_name("SHA"))
+ .arg(opt("path", "Filesystem path to local crate to install").value_name("PATH"))
+ .arg(opt(
+ "list",
+ "list all installed packages and their versions",
+ ))
.arg_jobs()
- .arg(
- opt("force", "Force overwriting existing crates or binaries")
- .short("f")
- )
+ .arg(opt("force", "Force overwriting existing crates or binaries").short("f"))
.arg_features()
.arg(opt("debug", "Build in debug mode instead of release mode"))
.arg_targets_bins_examples(
"Install only the specified example",
"Install all examples",
)
- .arg(
- opt("root", "Directory to install packages into")
- .value_name("DIR")
- )
- .after_help("\
+ .arg(opt("root", "Directory to install packages into").value_name("DIR"))
+ .after_help(
+ "\
This command manages Cargo's local set of installed binary crates. Only packages
which have [[bin]] targets can be installed, and all binaries are installed into
the installation root's `bin` folder. The installation root is determined, in
in a temporary target directory. To avoid this, the target directory can be
specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
path. In particular, this can be useful for caching build artifacts on
-continuous integration systems.")
+continuous integration systems.",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
compile_opts.release = !args.is_present("debug");
- let krates = args.values_of("crate").unwrap_or_default().collect::<Vec<_>>();
+ let krates = args.values_of("crate")
+ .unwrap_or_default()
+ .collect::<Vec<_>>();
let source = if let Some(url) = args.value_of("git") {
let url = url.to_url()?;
if args.is_present("list") {
ops::install_list(root, config)?;
} else {
- ops::install(root, krates, &source, version, &compile_opts, args.is_present("force"))?;
+ ops::install(
+ root,
+ krates,
+ &source,
+ version,
+ &compile_opts,
+ args.is_present("force"),
+ )?;
}
Ok(())
}
#[derive(Serialize)]
pub struct ProjectLocation {
- root: String
+ root: String,
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let root = args.root_manifest(config)?;
let root = root.to_str()
- .ok_or_else(|| format_err!("your project path contains characters \
- not representable in Unicode"))
+ .ok_or_else(|| {
+ format_err!(
+ "your project path contains characters \
+ not representable in Unicode"
+ )
+ })
.map_err(|e| CliError::new(e, 1))?
.to_string();
use std::io::{self, BufRead};
-use cargo::core::{SourceId, Source};
+use cargo::core::{Source, SourceId};
use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt};
use cargo::ops;
pub fn cli() -> App {
subcommand("login")
- .about("Save an api token from the registry locally. \
- If token is not specified, it will be read from stdin.")
+ .about(
+ "Save an api token from the registry locally. \
+ If token is not specified, it will be read from stdin.",
+ )
.arg(Arg::with_name("token"))
.arg(opt("host", "Host to set the token for").value_name("HOST"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
None => {
let host = match registry {
Some(ref _registry) => {
- return Err(format_err!("token must be provided when \
- --registry is provided.").into());
+ return Err(format_err!(
+ "token must be provided when \
+ --registry is provided."
+ ).into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
src.update()?;
let config = src.config()?.unwrap();
- args.value_of("host").map(|s| s.to_string())
+ args.value_of("host")
+ .map(|s| s.to_string())
.unwrap_or(config.api.unwrap())
}
};
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
let input = io::stdin();
- input.lock().read_line(&mut line).chain_err(|| {
- "failed to read stdin"
- }).map_err(CargoError::from)?;
+ input
+ .lock()
+ .read_line(&mut line)
+ .chain_err(|| "failed to read stdin")
+ .map_err(CargoError::from)?;
line.trim().to_string()
}
};
pub fn cli() -> App {
subcommand("metadata")
- .about("Output the resolved dependencies of a project, \
- the concrete used versions including overrides, \
- in machine-readable format")
- .arg_features()
- .arg(
- opt("no-deps", "Output information only about the root package \
- and don't fetch dependencies")
+ .about(
+ "Output the resolved dependencies of a project, \
+ the concrete used versions including overrides, \
+ in machine-readable format",
)
+ .arg_features()
+ .arg(opt(
+ "no-deps",
+ "Output information only about the root package \
+ and don't fetch dependencies",
+ ))
.arg_manifest_path()
.arg(
opt("format-version", "Format version")
- .value_name("VERSION").possible_value("1")
+ .value_name("VERSION")
+ .possible_value("1"),
)
}
let version = match args.value_of("format-version") {
None => {
- config.shell().warn("\
- please specify `--format-version` flag explicitly \
- to avoid compatibility problems"
+ config.shell().warn(
+ "\
+ please specify `--format-version` flag explicitly \
+ to avoid compatibility problems",
)?;
1
}
]
}
-pub fn builtin_exec(cmd: & str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
+pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
let f = match cmd {
"bench" => bench::exec,
"build" => build::exec,
let opts = args.new_options()?;
ops::new(&opts, config)?;
let path = args.value_of("path").unwrap();
- config.shell().status("Created", format!("{} `{}` project", opts.kind, path))?;
+ config
+ .shell()
+ .status("Created", format!("{} `{}` project", opts.kind, path))?;
Ok(())
}
.arg(Arg::with_name("crate"))
.arg(
opt("add", "Name of a user or team to add as an owner")
- .short("a").value_name("LOGIN").multiple(true)
+ .short("a")
+ .value_name("LOGIN")
+ .multiple(true),
)
.arg(
opt("remove", "Name of a user or team to remove as an owner")
- .short("r").value_name("LOGIN").multiple(true)
+ .short("r")
+ .value_name("LOGIN")
+ .multiple(true),
)
.arg(opt("list", "List owners of a crate").short("l"))
.arg(opt("index", "Registry index to modify owners for").value_name("INDEX"))
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
- .after_help("\
+ .after_help(
+ "\
This command will modify the owners for a package
on the specified registry(or
default).Note that owners of a package can upload new versions, yank old
caution!
See http://doc.crates.io/crates-io.html#cargo-owner for detailed documentation
- and troubleshooting.")
+ and troubleshooting.",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
use cargo::ops::{self, PackageOpts};
-
pub fn cli() -> App {
subcommand("package")
.about("Assemble the local package into a distributable tarball")
- .arg(opt("list", "Print files included in a package without making one").short("l"))
- .arg(opt("no-verify", "Don't verify the contents by building them"))
- .arg(opt("no-metadata", "Ignore warnings about a lack of human-usable metadata"))
- .arg(opt("allow-dirty", "Allow dirty working directories to be packaged"))
+ .arg(
+ opt(
+ "list",
+ "Print files included in a package without making one",
+ ).short("l"),
+ )
+ .arg(opt(
+ "no-verify",
+ "Don't verify the contents by building them",
+ ))
+ .arg(opt(
+ "no-metadata",
+ "Ignore warnings about a lack of human-usable metadata",
+ ))
+ .arg(opt(
+ "allow-dirty",
+ "Allow dirty working directories to be packaged",
+ ))
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_jobs()
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
- ops::package(&ws, &PackageOpts {
- config,
- verify: !args.is_present("no-verify"),
- list: args.is_present("list"),
- check_metadata: !args.is_present("no-metadata"),
- allow_dirty: args.is_present("allow-dirty"),
- target: args.target(),
- jobs: args.jobs()?,
- registry: None,
- })?;
+ ops::package(
+ &ws,
+ &PackageOpts {
+ config,
+ verify: !args.is_present("no-verify"),
+ list: args.is_present("list"),
+ check_metadata: !args.is_present("no-metadata"),
+ allow_dirty: args.is_present("allow-dirty"),
+ target: args.target(),
+ jobs: args.jobs()?,
+ registry: None,
+ },
+ )?;
Ok(())
}
.arg(Arg::with_name("spec"))
.arg_single_package("Argument to get the package id specifier for")
.arg_manifest_path()
- .after_help("\
+ .after_help(
+ "\
Given a <spec> argument, print out the fully qualified package id specifier.
This command will generate an error if <spec> is ambiguous as to which package
it refers to in the dependency graph. If no <spec> is given, then the pkgid for
crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo
crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar
http://crates.io/foo#1.2.3 | foo | 1.2.3 | http://crates.io/foo
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
.about("Upload a package to the registry")
.arg_index()
.arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
- .arg(opt("no-verify", "Don't verify the contents by building them"))
- .arg(opt("allow-dirty", "Allow dirty working directories to be packaged"))
+ .arg(opt(
+ "no-verify",
+ "Don't verify the contents by building them",
+ ))
+ .arg(opt(
+ "allow-dirty",
+ "Allow dirty working directories to be packaged",
+ ))
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_jobs()
- .arg(
- opt("dry-run", "Perform all checks without uploading")
- )
+ .arg(opt("dry-run", "Perform all checks without uploading"))
.arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
}
let ws = args.workspace(config)?;
let index = args.index(config)?;
- ops::publish(&ws, &PublishOpts {
- config,
- token: args.value_of("token").map(|s| s.to_string()),
- index,
- verify: !args.is_present("no-verify"),
- allow_dirty: args.is_present("allow-dirty"),
- target: args.target(),
- jobs: args.jobs()?,
- dry_run: args.is_present("dry-run"),
- registry,
- })?;
+ ops::publish(
+ &ws,
+ &PublishOpts {
+ config,
+ token: args.value_of("token").map(|s| s.to_string()),
+ index,
+ verify: !args.is_present("no-verify"),
+ allow_dirty: args.is_present("allow-dirty"),
+ target: args.target(),
+ jobs: args.jobs()?,
+ dry_run: args.is_present("dry-run"),
+ registry,
+ },
+ )?;
Ok(())
}
pub fn cli() -> App {
subcommand("read-manifest")
- .about("Deprecated, use `cargo metadata --no-deps` instead.
-Print a JSON representation of a Cargo.toml manifest.")
+ .about(
+ "Deprecated, use `cargo metadata --no-deps` instead.
+Print a JSON representation of a Cargo.toml manifest.",
+ )
.arg_manifest_path()
}
use command_prelude::*;
use cargo::core::Verbosity;
-use cargo::ops::{self, CompileMode, CompileFilter};
+use cargo::ops::{self, CompileFilter, CompileMode};
pub fn cli() -> App {
- subcommand("run").alias("r")
+ subcommand("run")
+ .alias("r")
.setting(AppSettings::TrailingVarArg)
.about("Run the main binary of the local package (src/main.rs)")
.arg(Arg::with_name("args").multiple(true))
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
If neither `--bin` nor `--example` are given, then if the project only has one
bin target it will be run. Otherwise `--bin` specifies the bin target to run,
and `--example` specifies the example target to run. At most one of `--bin` or
All of the trailing arguments are passed to the binary to run. If you're passing
arguments to both Cargo and the binary, the ones after `--` go to the binary,
the ones before go to Cargo.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
- let mut compile_opts = args.compile_options_for_single_package(
- config, CompileMode::Build,
- )?;
+ let mut compile_opts = args.compile_options_for_single_package(config, CompileMode::Build)?;
if !args.is_present("example") && !args.is_present("bin") {
compile_opts.filter = CompileFilter::Default {
required_features_filterable: false,
"Build all targets (lib and bin targets by default)",
)
.arg_release("Build artifacts in release mode, with optimizations")
- .arg(
- opt("profile", "Profile to build the selected target for")
- .value_name("PROFILE")
- )
+ .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
.arg_features()
.arg_target_triple("Target triple which compiles will be for")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
The specified target for the current package (or package specified by SPEC if
provided) will be compiled along with all of its dependencies. The specified
<args>... will all be passed to the final compiler invocation, not any of the
must be used to select which target is compiled. To pass flags to all compiler
processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
`build.rustflags` configuration option.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
Some("bench") => CompileMode::Bench,
Some("check") => CompileMode::Check { test: false },
Some(mode) => {
- let err = format_err!("unknown profile: `{}`, use dev,
- test, or bench", mode);
+ let err = format_err!(
+ "unknown profile: `{}`, use dev,
+ test, or bench",
+ mode
+ );
return Err(CliError::new(err, 101));
}
};
- let mut compile_opts = args.compile_options_for_single_package(
- config, mode,
- )?;
+ let mut compile_opts = args.compile_options_for_single_package(config, mode)?;
compile_opts.target_rustc_args = Some(values(args, "args"));
ops::compile(&ws, &compile_opts)?;
Ok(())
.setting(AppSettings::TrailingVarArg)
.about("Build a package's documentation, using specified custom flags.")
.arg(Arg::with_name("args").multiple(true))
- .arg(opt("open", "Opens the docs in a browser after the operation"))
+ .arg(opt(
+ "open",
+ "Opens the docs in a browser after the operation",
+ ))
.arg_single_package("Package to document")
.arg_jobs()
.arg_targets_all(
.arg_release("Build artifacts in release mode, with optimizations")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
The specified target for the current package (or package specified by SPEC if
provided) will be documented with the specified <opts>... being passed to the
final rustdoc invocation. Dependencies will not be documented as part of this
which indicates which package should be documented. If it is not given, then the
current package is documented. For more information on SPEC and its format, see
the `cargo help pkgid` command.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
- let mut compile_opts = args.compile_options_for_single_package(
- config, CompileMode::Doc { deps: false },
- )?;
+ let mut compile_opts =
+ args.compile_options_for_single_package(config, CompileMode::Doc { deps: false })?;
compile_opts.target_rustdoc_args = Some(values(args, "args"));
let doc_opts = DocOptions {
open_result: args.is_present("open"),
.arg(Arg::with_name("query").multiple(true))
.arg_index()
.arg(
- opt("limit", "Limit the number of results (default: 10, max: 100)")
- .value_name("LIMIT")
+ opt(
+ "limit",
+ "Limit the number of results (default: 10, max: 100)",
+ ).value_name("LIMIT"),
)
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}
use cargo::ops::{self, CompileMode};
pub fn cli() -> App {
- subcommand("test").alias("t")
+ subcommand("test")
+ .alias("t")
.setting(AppSettings::TrailingVarArg)
.about("Execute all unit and integration tests of a local package")
.arg(
- Arg::with_name("TESTNAME").help(
- "If specified, only run tests containing this string in their names"
- )
+ Arg::with_name("TESTNAME")
+ .help("If specified, only run tests containing this string in their names"),
)
.arg(
- Arg::with_name("args").help(
- "Arguments for the test binary"
- ).multiple(true).last(true)
+ Arg::with_name("args")
+ .help("Arguments for the test binary")
+ .multiple(true)
+ .last(true),
)
.arg_targets_all(
"Test only this package's library",
"Test all targets (default)",
)
.arg(opt("doc", "Test only this library's documentation"))
- .arg(
- opt("no-run", "Compile, but don't run tests")
- )
- .arg(
- opt("no-fail-fast", "Run all tests regardless of failure")
- )
+ .arg(opt("no-run", "Compile, but don't run tests"))
+ .arg(opt("no-fail-fast", "Run all tests regardless of failure"))
.arg_package(
"Package to run tests for",
"Test all packages in the workspace",
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
- .after_help("\
+ .after_help(
+ "\
All of the trailing arguments are passed to the test binaries generated for
filtering tests and generally providing options configuring how they run. For
example, this will run all tests with the name `foo` in their name:
To get the list of all options available for the test binaries use this:
cargo test -- --help
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let doc = args.is_present("doc");
if doc {
compile_opts.mode = ops::CompileMode::Doctest;
- compile_opts.filter = ops::CompileFilter::new(true,
- Vec::new(), false,
- Vec::new(), false,
- Vec::new(), false,
- Vec::new(), false,
- false);
+ compile_opts.filter = ops::CompileFilter::new(
+ true,
+ Vec::new(),
+ false,
+ Vec::new(),
+ false,
+ Vec::new(),
+ false,
+ Vec::new(),
+ false,
+ false,
+ );
}
let ops = ops::TestOptions {
// important so we explicitly mention it and reconfigure
let mut test_args = vec![];
test_args.extend(args.value_of("TESTNAME").into_iter().map(|s| s.to_string()));
- test_args.extend(args.values_of("args").unwrap_or_default().map(|s| s.to_string()));
+ test_args.extend(
+ args.values_of("args")
+ .unwrap_or_default()
+ .map(|s| s.to_string()),
+ );
let err = ops::run_tests(&ws, &ops, &test_args)?;
return match err {
None => Ok(()),
- Some(err) => {
- Err(match err.exit.as_ref().and_then(|e| e.code()) {
- Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i),
- None => CliError::new(err.into(), 101),
- })
- }
+ Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+ Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i),
+ None => CliError::new(err.into(), 101),
+ }),
};
}
.arg(Arg::with_name("spec").multiple(true))
.arg(
opt("bin", "Only uninstall the binary NAME")
- .value_name("NAME").multiple(true)
+ .value_name("NAME")
+ .multiple(true),
)
- .arg(
- opt("root", "Directory to uninstall packages from")
- .value_name("DIR")
- )
- .after_help("\
+ .arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
+ .after_help(
+ "\
The argument SPEC is a package id specification (see `cargo help pkgid`) to
specify which crate should be uninstalled. By default all binaries are
uninstalled for a crate but the `--bin` and `--example` flags can be used to
only uninstall particular binaries.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
.about("Update dependencies as recorded in the local lock file")
.arg(
opt("package", "Package to clean artifacts for")
- .short("p").value_name("SPEC").multiple(true)
- )
- .arg(opt("aggressive", "Force updating all dependencies of <name> as well"))
- .arg(
- opt("precise", "Update a single dependency to exactly PRECISE")
- .value_name("PRECISE")
+ .short("p")
+ .value_name("SPEC")
+ .multiple(true),
)
+ .arg(opt(
+ "aggressive",
+ "Force updating all dependencies of <name> as well",
+ ))
+ .arg(opt("precise", "Update a single dependency to exactly PRECISE").value_name("PRECISE"))
.arg_manifest_path()
- .after_help("\
+ .after_help(
+ "\
This command requires that a `Cargo.lock` already exists as generated by
`cargo build` or related commands.
updated.
For more information about package id specifications, see `cargo help pkgid`.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let file = File::open(&filename);
match file.and_then(|mut f| f.read_to_string(&mut contents)) {
Ok(_) => {}
- Err(e) => fail("invalid", &format!("error reading file: {}", e))
+ Err(e) => fail("invalid", &format!("error reading file: {}", e)),
};
if contents.parse::<toml::Value>().is_err() {
fail("invalid", "invalid-format");
use cargo;
pub fn cli() -> App {
- subcommand("version")
- .about("Show version information")
+ subcommand("version").about("Show version information")
}
pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult {
subcommand("yank")
.about("Remove a pushed crate from the index")
.arg(Arg::with_name("crate"))
- .arg(
- opt("vers", "The version to yank or un-yank").value_name("VERSION")
- )
- .arg(opt("undo", "Undo a yank, putting a version back into the index"))
+ .arg(opt("vers", "The version to yank or un-yank").value_name("VERSION"))
+ .arg(opt(
+ "undo",
+ "Undo a yank, putting a version back into the index",
+ ))
.arg(opt("index", "Registry index to yank from").value_name("INDEX"))
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
- .after_help("\
+ .after_help(
+ "\
The yank command removes a previously pushed crate's version from the server's
index. This command does not delete any data, and the crate will still be
available for download via the registry's download link.
Note that existing crates locked to a yanked version will still be able to
download the yanked version to use it. Cargo will, however, not allow any new
crates to be locked to any yanked version.
-")
+",
+ )
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?;
- ops::yank(config,
- args.value_of("crate").map(|s| s.to_string()),
- args.value_of("vers").map(|s| s.to_string()),
- args.value_of("token").map(|s| s.to_string()),
- args.value_of("index").map(|s| s.to_string()),
- args.is_present("undo"),
- registry)?;
+ ops::yank(
+ config,
+ args.value_of("crate").map(|s| s.to_string()),
+ args.value_of("vers").map(|s| s.to_string()),
+ args.value_of("token").map(|s| s.to_string()),
+ args.value_of("index").map(|s| s.to_string()),
+ args.is_present("undo"),
+ registry,
+ )?;
Ok(())
}
use semver::ReqParseError;
use serde::ser;
-use core::{SourceId, Summary, PackageId};
+use core::{PackageId, SourceId, Summary};
use core::interning::InternedString;
use util::{Cfg, CfgExpr, Config};
-use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
/// Information about a dependency requested by a Cargo manifest.
/// Cheap to copy.
impl ser::Serialize for Dependency {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
SerializedDependency {
name: &*self.name(),
Build,
}
-fn parse_req_with_deprecated(req: &str,
- extra: Option<(&PackageId, &Config)>)
- -> CargoResult<VersionReq> {
+fn parse_req_with_deprecated(
+ req: &str,
+ extra: Option<(&PackageId, &Config)>,
+) -> CargoResult<VersionReq> {
match VersionReq::parse(req) {
Err(e) => {
let (inside, config) = match extra {
};
match e {
ReqParseError::DeprecatedVersionRequirement(requirement) => {
- let msg = format!("\
+ let msg = format!(
+ "\
parsed version requirement `{}` is no longer valid
Previous versions of Cargo accepted this malformed requirement,
update to a fixed version or contact the upstream maintainer about
this warning.
",
-req, inside.name(), inside.version(), requirement);
+ req,
+ inside.name(),
+ inside.version(),
+ requirement
+ );
config.shell().warn(&msg)?;
Ok(requirement)
}
e => Err(e.into()),
}
- },
+ }
Ok(v) => Ok(v),
}
}
impl ser::Serialize for Kind {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
match *self {
Kind::Normal => None,
impl Dependency {
/// Attempt to create a `Dependency` from an entry in the manifest.
- pub fn parse(name: &str,
- version: Option<&str>,
- source_id: &SourceId,
- inside: &PackageId,
- config: &Config) -> CargoResult<Dependency> {
+ pub fn parse(
+ name: &str,
+ version: Option<&str>,
+ source_id: &SourceId,
+ inside: &PackageId,
+ config: &Config,
+ ) -> CargoResult<Dependency> {
let arg = Some((inside, config));
let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(v, arg)?),
- None => (false, VersionReq::any())
+ None => (false, VersionReq::any()),
};
let mut ret = Dependency::new_override(name, source_id);
}
/// Attempt to create a `Dependency` from an entry in the manifest.
- pub fn parse_no_deprecated(name: &str,
- version: Option<&str>,
- source_id: &SourceId) -> CargoResult<Dependency> {
+ pub fn parse_no_deprecated(
+ name: &str,
+ version: Option<&str>,
+ source_id: &SourceId,
+ ) -> CargoResult<Dependency> {
let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(v, None)?),
- None => (false, VersionReq::any())
+ None => (false, VersionReq::any()),
};
let mut ret = Dependency::new_override(name, source_id);
pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
assert_eq!(self.inner.source_id, *id.source_id());
assert!(self.inner.req.matches(id.version()));
- trace!("locking dep from `{}` with `{}` at {} to {}",
- self.name(),
- self.version_req(),
- self.source_id(),
- id);
+ trace!(
+ "locking dep from `{}` with `{}` at {} to {}",
+ self.name(),
+ self.version_req(),
+ self.source_id(),
+ id
+ );
self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone())
}
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches_ignoring_source(&self, sum: &Summary) -> bool {
- self.name() == sum.package_id().name() &&
- self.version_req().matches(sum.package_id().version())
+ self.name() == sum.package_id().name()
+ && self.version_req().matches(sum.package_id().version())
}
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
- self.inner.name == id.name() &&
- (self.inner.only_match_name || (self.inner.req.matches(id.version()) &&
- &self.inner.source_id == id.source_id()))
+ self.inner.name == id.name()
+ && (self.inner.only_match_name
+ || (self.inner.req.matches(id.version())
+ && &self.inner.source_id == id.source_id()))
}
- pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId)
- -> Dependency {
+ pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency {
if self.source_id() != to_replace {
self
} else {
pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool {
match *self {
Platform::Name(ref p) => p == name,
- Platform::Cfg(ref p) => {
- match cfg {
- Some(cfg) => p.matches(cfg),
- None => false,
- }
- }
+ Platform::Cfg(ref p) => match cfg {
+ Some(cfg) => p.matches(cfg),
+ None => false,
+ },
}
}
}
impl ser::Serialize for Platform {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
self.to_string().serialize(s)
}
fn from_str(s: &str) -> CargoResult<Platform> {
if s.starts_with("cfg(") && s.ends_with(')') {
- let s = &s[4..s.len()-1];
- let p = s.parse().map(Platform::Cfg).chain_err(|| {
- format_err!("failed to parse `{}` as a cfg expression", s)
- })?;
+ let s = &s[4..s.len() - 1];
+ let p = s.parse()
+ .map(Platform::Cfg)
+ .chain_err(|| format_err!("failed to parse `{}` as a cfg expression", s))?;
Ok(p)
} else {
Ok(Platform::Name(s.to_string()))
use util::errors::CargoResult;
/// The epoch of the compiler (RFC 2052)
-#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq)]
-#[derive(Serialize, Deserialize)]
+#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)]
pub enum Epoch {
/// The 2015 epoch
Epoch2015,
match s {
"2015" => Ok(Epoch::Epoch2015),
"2018" => Ok(Epoch::Epoch2018),
- _ => Err(())
+ _ => Err(()),
}
}
}
}
impl Features {
- pub fn new(features: &[String],
- warnings: &mut Vec<String>) -> CargoResult<Features> {
+ pub fn new(features: &[String], warnings: &mut Vec<String>) -> CargoResult<Features> {
let mut ret = Features::default();
for feature in features {
ret.add(feature, warnings)?;
match status {
Status::Stable => {
- let warning = format!("the cargo feature `{}` is now stable \
- and is no longer necessary to be listed \
- in the manifest", feature);
+ let warning = format!(
+ "the cargo feature `{}` is now stable \
+ and is no longer necessary to be listed \
+ in the manifest",
+ feature
+ );
warnings.push(warning);
}
- Status::Unstable if !nightly_features_allowed() => {
- bail!("the cargo feature `{}` requires a nightly version of \
- Cargo, but this is the `{}` channel",
- feature,
- channel())
- }
+ Status::Unstable if !nightly_features_allowed() => bail!(
+ "the cargo feature `{}` requires a nightly version of \
+ Cargo, but this is the `{}` channel",
+ feature,
+ channel()
+ ),
Status::Unstable => {}
}
let mut msg = format!("feature `{}` is required", feature);
if nightly_features_allowed() {
- let s = format!("\n\nconsider adding `cargo-features = [\"{0}\"]` \
- to the manifest", feature);
+ let s = format!(
+ "\n\nconsider adding `cargo-features = [\"{0}\"]` \
+ to the manifest",
+ feature
+ );
msg.push_str(&s);
} else {
- let s = format!("\n\n\
- this Cargo does not support nightly features, but if you\n\
- switch to nightly channel you can add\n\
- `cargo-features = [\"{}\"]` to enable this feature",
- feature);
+ let s = format!(
+ "\n\n\
+ this Cargo does not support nightly features, but if you\n\
+ switch to nightly channel you can add\n\
+ `cargo-features = [\"{}\"]` to enable this feature",
+ feature
+ );
msg.push_str(&s);
}
bail!("{}", msg);
fn parse_bool(value: Option<&str>) -> CargoResult<bool> {
match value {
- None |
- Some("yes") => Ok(true),
+ None | Some("yes") => Ok(true),
Some("no") => Ok(false),
Some(s) => bail!("expected `no` or `yes`, found: {}", s),
}
fn channel() -> String {
env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS").unwrap_or_else(|_| {
- ::version().cfg_info.map(|c| c.release_channel)
+ ::version()
+ .cfg_info
+ .map(|c| c.release_channel)
.unwrap_or_else(|| String::from("dev"))
})
}
pub fn new(str: &str) -> InternedString {
let mut cache = STRING_CASHE.write().unwrap();
if let Some(&s) = cache.get(str) {
- return InternedString { ptr: s.as_ptr(), len: s.len() };
+ return InternedString {
+ ptr: s.as_ptr(),
+ len: s.len(),
+ };
}
let s = leek(str.to_string());
cache.insert(s);
- InternedString { ptr: s.as_ptr(), len: s.len() }
+ InternedString {
+ ptr: s.as_ptr(),
+ len: s.len(),
+ }
}
pub fn to_inner(&self) -> &'static str {
unsafe {
}
unsafe impl Send for InternedString {}
-unsafe impl Sync for InternedString {}
\ No newline at end of file
+unsafe impl Sync for InternedString {}
-use std::collections::{HashMap, BTreeMap};
+use std::collections::{BTreeMap, HashMap};
use std::fmt;
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::hash::{Hash, Hasher};
use serde::ser;
use url::Url;
-use core::{Dependency, PackageId, Summary, SourceId, PackageIdSpec};
-use core::{WorkspaceConfig, Epoch, Features, Feature};
+use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
+use core::{Epoch, Feature, Features, WorkspaceConfig};
use core::interning::InternedString;
use util::Config;
use util::toml::TomlManifest;
#[derive(Clone, Debug)]
pub struct DelayedWarning {
pub message: String,
- pub is_critical: bool
+ pub is_critical: bool,
}
#[derive(Clone, Debug)]
pub categories: Vec<String>,
pub license: Option<String>,
pub license_file: Option<String>,
- pub description: Option<String>, // not markdown
- pub readme: Option<String>, // file, not contents
- pub homepage: Option<String>, // url
- pub repository: Option<String>, // url
- pub documentation: Option<String>, // url
+ pub description: Option<String>, // not markdown
+ pub readme: Option<String>, // file, not contents
+ pub homepage: Option<String>, // url
+ pub repository: Option<String>, // url
+ pub documentation: Option<String>, // url
pub badges: BTreeMap<String, BTreeMap<String, String>>,
pub links: Option<String>,
}
pub fn linkable(&self) -> bool {
match *self {
- LibKind::Lib |
- LibKind::Rlib |
- LibKind::Dylib |
- LibKind::ProcMacro => true,
+ LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true,
LibKind::Other(..) => false,
}
}
impl ser::Serialize for TargetKind {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
use self::TargetKind::*;
match *self {
ExampleBin | ExampleLib(_) => vec!["example"],
Test => vec!["test"],
CustomBuild => vec!["custom-build"],
- Bench => vec!["bench"]
+ Bench => vec!["bench"],
}.serialize(s)
}
}
-
// Note that most of the fields here are skipped when serializing because we
// don't want to export them just yet (becomes a public API of Cargo). Others
// though are definitely needed!
#[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)]
pub struct Profile {
pub opt_level: String,
- #[serde(skip_serializing)]
- pub lto: Lto,
- #[serde(skip_serializing)]
- pub codegen_units: Option<u32>, // None = use rustc default
- #[serde(skip_serializing)]
- pub rustc_args: Option<Vec<String>>,
- #[serde(skip_serializing)]
- pub rustdoc_args: Option<Vec<String>>,
+ #[serde(skip_serializing)] pub lto: Lto,
+ #[serde(skip_serializing)] pub codegen_units: Option<u32>, // None = use rustc default
+ #[serde(skip_serializing)] pub rustc_args: Option<Vec<String>>,
+ #[serde(skip_serializing)] pub rustdoc_args: Option<Vec<String>>,
pub debuginfo: Option<u32>,
pub debug_assertions: bool,
pub overflow_checks: bool,
- #[serde(skip_serializing)]
- pub rpath: bool,
+ #[serde(skip_serializing)] pub rpath: bool,
pub test: bool,
- #[serde(skip_serializing)]
- pub doc: bool,
- #[serde(skip_serializing)]
- pub run_custom_build: bool,
- #[serde(skip_serializing)]
- pub check: bool,
- #[serde(skip_serializing)]
- pub panic: Option<String>,
- #[serde(skip_serializing)]
- pub incremental: bool,
+ #[serde(skip_serializing)] pub doc: bool,
+ #[serde(skip_serializing)] pub run_custom_build: bool,
+ #[serde(skip_serializing)] pub check: bool,
+ #[serde(skip_serializing)] pub panic: Option<String>,
+ #[serde(skip_serializing)] pub incremental: bool,
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
}
impl Manifest {
- pub fn new(summary: Summary,
- targets: Vec<Target>,
- exclude: Vec<String>,
- include: Vec<String>,
- links: Option<String>,
- metadata: ManifestMetadata,
- profiles: Profiles,
- publish: Option<Vec<String>>,
- publish_lockfile: bool,
- replace: Vec<(PackageIdSpec, Dependency)>,
- patch: HashMap<Url, Vec<Dependency>>,
- workspace: WorkspaceConfig,
- features: Features,
- epoch: Epoch,
- im_a_teapot: Option<bool>,
- original: Rc<TomlManifest>) -> Manifest {
+ pub fn new(
+ summary: Summary,
+ targets: Vec<Target>,
+ exclude: Vec<String>,
+ include: Vec<String>,
+ links: Option<String>,
+ metadata: ManifestMetadata,
+ profiles: Profiles,
+ publish: Option<Vec<String>>,
+ publish_lockfile: bool,
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ patch: HashMap<Url, Vec<Dependency>>,
+ workspace: WorkspaceConfig,
+ features: Features,
+ epoch: Epoch,
+ im_a_teapot: Option<bool>,
+ original: Rc<TomlManifest>,
+ ) -> Manifest {
Manifest {
summary,
targets,
}
}
- pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() }
- pub fn exclude(&self) -> &[String] { &self.exclude }
- pub fn include(&self) -> &[String] { &self.include }
- pub fn metadata(&self) -> &ManifestMetadata { &self.metadata }
- pub fn name(&self) -> InternedString { self.package_id().name() }
- pub fn package_id(&self) -> &PackageId { self.summary.package_id() }
- pub fn summary(&self) -> &Summary { &self.summary }
- pub fn targets(&self) -> &[Target] { &self.targets }
- pub fn version(&self) -> &Version { self.package_id().version() }
- pub fn warnings(&self) -> &[DelayedWarning] { &self.warnings }
- pub fn profiles(&self) -> &Profiles { &self.profiles }
- pub fn publish(&self) -> &Option<Vec<String>> { &self.publish }
- pub fn publish_lockfile(&self) -> bool { self.publish_lockfile }
- pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace }
- pub fn original(&self) -> &TomlManifest { &self.original }
- pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> { &self.patch }
+ pub fn dependencies(&self) -> &[Dependency] {
+ self.summary.dependencies()
+ }
+ pub fn exclude(&self) -> &[String] {
+ &self.exclude
+ }
+ pub fn include(&self) -> &[String] {
+ &self.include
+ }
+ pub fn metadata(&self) -> &ManifestMetadata {
+ &self.metadata
+ }
+ pub fn name(&self) -> InternedString {
+ self.package_id().name()
+ }
+ pub fn package_id(&self) -> &PackageId {
+ self.summary.package_id()
+ }
+ pub fn summary(&self) -> &Summary {
+ &self.summary
+ }
+ pub fn targets(&self) -> &[Target] {
+ &self.targets
+ }
+ pub fn version(&self) -> &Version {
+ self.package_id().version()
+ }
+ pub fn warnings(&self) -> &[DelayedWarning] {
+ &self.warnings
+ }
+ pub fn profiles(&self) -> &Profiles {
+ &self.profiles
+ }
+ pub fn publish(&self) -> &Option<Vec<String>> {
+ &self.publish
+ }
+ pub fn publish_lockfile(&self) -> bool {
+ self.publish_lockfile
+ }
+ pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+ &self.replace
+ }
+ pub fn original(&self) -> &TomlManifest {
+ &self.original
+ }
+ pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
+ &self.patch
+ }
pub fn links(&self) -> Option<&str> {
self.links.as_ref().map(|s| &s[..])
}
}
pub fn add_warning(&mut self, s: String) {
- self.warnings.push(DelayedWarning { message: s, is_critical: false })
+ self.warnings.push(DelayedWarning {
+ message: s,
+ is_critical: false,
+ })
}
pub fn add_critical_warning(&mut self, s: String) {
- self.warnings.push(DelayedWarning { message: s, is_critical: true })
+ self.warnings.push(DelayedWarning {
+ message: s,
+ is_critical: true,
+ })
}
pub fn set_summary(&mut self, summary: Summary) {
self.summary = summary;
}
- pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
- -> Manifest {
+ pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest {
Manifest {
summary: self.summary.map_source(to_replace, replace_with),
..self
pub fn feature_gate(&self) -> CargoResult<()> {
if self.im_a_teapot.is_some() {
- self.features.require(Feature::test_dummy_unstable()).chain_err(|| {
- format_err!("the `im-a-teapot` manifest key is unstable and may \
- not work properly in England")
- })?;
+ self.features
+ .require(Feature::test_dummy_unstable())
+ .chain_err(|| {
+ format_err!(
+ "the `im-a-teapot` manifest key is unstable and may \
+ not work properly in England"
+ )
+ })?;
}
Ok(())
}
impl VirtualManifest {
- pub fn new(replace: Vec<(PackageIdSpec, Dependency)>,
- patch: HashMap<Url, Vec<Dependency>>,
- workspace: WorkspaceConfig,
- profiles: Profiles) -> VirtualManifest {
+ pub fn new(
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ patch: HashMap<Url, Vec<Dependency>>,
+ workspace: WorkspaceConfig,
+ profiles: Profiles,
+ ) -> VirtualManifest {
VirtualManifest {
replace,
patch,
}
}
- pub fn lib_target(name: &str,
- crate_targets: Vec<LibKind>,
- src_path: PathBuf) -> Target {
+ pub fn lib_target(name: &str, crate_targets: Vec<LibKind>, src_path: PathBuf) -> Target {
Target {
kind: TargetKind::Lib(crate_targets),
name: name.to_string(),
}
}
- pub fn bin_target(name: &str, src_path: PathBuf,
- required_features: Option<Vec<String>>) -> Target {
+ pub fn bin_target(
+ name: &str,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ ) -> Target {
Target {
kind: TargetKind::Bin,
name: name.to_string(),
}
}
- pub fn example_target(name: &str,
- crate_targets: Vec<LibKind>,
- src_path: PathBuf,
- required_features: Option<Vec<String>>) -> Target {
+ pub fn example_target(
+ name: &str,
+ crate_targets: Vec<LibKind>,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ ) -> Target {
let kind = if crate_targets.is_empty() {
TargetKind::ExampleBin
} else {
}
}
- pub fn test_target(name: &str, src_path: PathBuf,
- required_features: Option<Vec<String>>) -> Target {
+ pub fn test_target(
+ name: &str,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ ) -> Target {
Target {
kind: TargetKind::Test,
name: name.to_string(),
}
}
- pub fn bench_target(name: &str, src_path: PathBuf,
- required_features: Option<Vec<String>>) -> Target {
+ pub fn bench_target(
+ name: &str,
+ src_path: PathBuf,
+ required_features: Option<Vec<String>>,
+ ) -> Target {
Target {
kind: TargetKind::Bench,
name: name.to_string(),
}
}
- pub fn name(&self) -> &str { &self.name }
- pub fn crate_name(&self) -> String { self.name.replace("-", "_") }
- pub fn src_path(&self) -> &Path { &self.src_path.path }
- pub fn required_features(&self) -> Option<&Vec<String>> { self.required_features.as_ref() }
- pub fn kind(&self) -> &TargetKind { &self.kind }
- pub fn tested(&self) -> bool { self.tested }
- pub fn harness(&self) -> bool { self.harness }
- pub fn documented(&self) -> bool { self.doc }
- pub fn for_host(&self) -> bool { self.for_host }
- pub fn benched(&self) -> bool { self.benched }
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+ pub fn crate_name(&self) -> String {
+ self.name.replace("-", "_")
+ }
+ pub fn src_path(&self) -> &Path {
+ &self.src_path.path
+ }
+ pub fn required_features(&self) -> Option<&Vec<String>> {
+ self.required_features.as_ref()
+ }
+ pub fn kind(&self) -> &TargetKind {
+ &self.kind
+ }
+ pub fn tested(&self) -> bool {
+ self.tested
+ }
+ pub fn harness(&self) -> bool {
+ self.harness
+ }
+ pub fn documented(&self) -> bool {
+ self.doc
+ }
+ pub fn for_host(&self) -> bool {
+ self.for_host
+ }
+ pub fn benched(&self) -> bool {
+ self.benched
+ }
pub fn doctested(&self) -> bool {
self.doctest && match self.kind {
- TargetKind::Lib(ref kinds) => {
- kinds.iter().any(|k| {
- *k == LibKind::Rlib ||
- *k == LibKind::Lib ||
- *k == LibKind::ProcMacro
- })
- }
+ TargetKind::Lib(ref kinds) => kinds
+ .iter()
+ .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro),
_ => false,
}
}
pub fn is_lib(&self) -> bool {
match self.kind {
TargetKind::Lib(_) => true,
- _ => false
+ _ => false,
}
}
pub fn is_dylib(&self) -> bool {
match self.kind {
TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib),
- _ => false
+ _ => false,
}
}
pub fn is_cdylib(&self) -> bool {
let libs = match self.kind {
TargetKind::Lib(ref libs) => libs,
- _ => return false
+ _ => return false,
};
- libs.iter().any(|l| {
- match *l {
- LibKind::Other(ref s) => s == "cdylib",
- _ => false,
- }
+ libs.iter().any(|l| match *l {
+ LibKind::Other(ref s) => s == "cdylib",
+ _ => false,
})
}
pub fn linkable(&self) -> bool {
match self.kind {
- TargetKind::Lib(ref kinds) => {
- kinds.iter().any(|k| k.linkable())
- }
- _ => false
+ TargetKind::Lib(ref kinds) => kinds.iter().any(|k| k.linkable()),
+ _ => false,
}
}
- pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin }
+ pub fn is_bin(&self) -> bool {
+ self.kind == TargetKind::Bin
+ }
pub fn is_example(&self) -> bool {
match self.kind {
- TargetKind::ExampleBin |
- TargetKind::ExampleLib(..) => true,
- _ => false
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => true,
+ _ => false,
}
}
// Needed for --all-examples in contexts where only runnable examples make sense
match self.kind {
TargetKind::ExampleBin => true,
- _ => false
+ _ => false,
}
}
- pub fn is_test(&self) -> bool { self.kind == TargetKind::Test }
- pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench }
- pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild }
+ pub fn is_test(&self) -> bool {
+ self.kind == TargetKind::Test
+ }
+ pub fn is_bench(&self) -> bool {
+ self.kind == TargetKind::Bench
+ }
+ pub fn is_custom_build(&self) -> bool {
+ self.kind == TargetKind::CustomBuild
+ }
/// Returns the arguments suitable for `--crate-type` to pass to rustc.
pub fn rustc_crate_types(&self) -> Vec<&str> {
match self.kind {
- TargetKind::Lib(ref kinds) |
- TargetKind::ExampleLib(ref kinds) => {
+ TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => {
kinds.iter().map(LibKind::crate_type).collect()
}
- TargetKind::CustomBuild |
- TargetKind::Bench |
- TargetKind::Test |
- TargetKind::ExampleBin |
- TargetKind::Bin => vec!["bin"],
+ TargetKind::CustomBuild
+ | TargetKind::Bench
+ | TargetKind::Test
+ | TargetKind::ExampleBin
+ | TargetKind::Bin => vec!["bin"],
}
}
pub fn can_lto(&self) -> bool {
match self.kind {
TargetKind::Lib(ref v) => {
- !v.contains(&LibKind::Rlib) &&
- !v.contains(&LibKind::Dylib) &&
- !v.contains(&LibKind::Lib)
+ !v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib)
+ && !v.contains(&LibKind::Lib)
}
_ => true,
}
TargetKind::Bin => write!(f, "Target(bin: {})", self.name),
TargetKind::Test => write!(f, "Target(test: {})", self.name),
TargetKind::Bench => write!(f, "Target(bench: {})", self.name),
- TargetKind::ExampleBin |
- TargetKind::ExampleLib(..) => write!(f, "Target(example: {})", self.name),
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {
+ write!(f, "Target(example: {})", self.name)
+ }
TargetKind::CustomBuild => write!(f, "Target(script)"),
}
}
} else {
write!(f, "Profile(build)")
}
-
}
}
pub use self::dependency::Dependency;
-pub use self::features::{Epoch, Features, Feature, CliUnstable};
+pub use self::features::{CliUnstable, Epoch, Feature, Features};
pub use self::manifest::{EitherManifest, VirtualManifest};
-pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};
+pub use self::manifest::{LibKind, Manifest, Profile, Profiles, Target, TargetKind};
pub use self::package::{Package, PackageSet};
pub use self::package_id::PackageId;
pub use self::package_id_spec::PackageIdSpec;
pub use self::registry::Registry;
pub use self::resolver::Resolve;
pub use self::shell::{Shell, Verbosity};
-pub use self::source::{Source, SourceId, SourceMap, GitReference};
+pub use self::source::{GitReference, Source, SourceId, SourceMap};
pub use self::summary::Summary;
pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig};
use std::cell::{Ref, RefCell};
-use std::collections::{HashMap, BTreeMap};
+use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
use lazycell::LazyCell;
use core::{Dependency, Manifest, PackageId, SourceId, Target};
-use core::{Summary, SourceMap};
+use core::{SourceMap, Summary};
use core::interning::InternedString;
use ops;
-use util::{Config, internal, lev_distance};
+use util::{internal, lev_distance, Config};
use util::errors::{CargoResult, CargoResultExt};
/// Information about a package that is available somewhere in the file system.
impl ser::Serialize for Package {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
let summary = self.manifest.summary();
let package_id = summary.package_id();
impl Package {
/// Create a package from a manifest and its location
- pub fn new(manifest: Manifest,
- manifest_path: &Path) -> Package {
+ pub fn new(manifest: Manifest, manifest_path: &Path) -> Package {
Package {
manifest,
manifest_path: manifest_path.to_path_buf(),
}
/// Get the manifest dependencies
- pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
+ pub fn dependencies(&self) -> &[Dependency] {
+ self.manifest.dependencies()
+ }
/// Get the manifest
- pub fn manifest(&self) -> &Manifest { &self.manifest }
+ pub fn manifest(&self) -> &Manifest {
+ &self.manifest
+ }
/// Get the path to the manifest
- pub fn manifest_path(&self) -> &Path { &self.manifest_path }
+ pub fn manifest_path(&self) -> &Path {
+ &self.manifest_path
+ }
/// Get the name of the package
- pub fn name(&self) -> InternedString { self.package_id().name() }
+ pub fn name(&self) -> InternedString {
+ self.package_id().name()
+ }
/// Get the PackageId object for the package (fully defines a package)
- pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
+ pub fn package_id(&self) -> &PackageId {
+ self.manifest.package_id()
+ }
/// Get the root folder of the package
- pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
+ pub fn root(&self) -> &Path {
+ self.manifest_path.parent().unwrap()
+ }
/// Get the summary for the package
- pub fn summary(&self) -> &Summary { self.manifest.summary() }
+ pub fn summary(&self) -> &Summary {
+ self.manifest.summary()
+ }
/// Get the targets specified in the manifest
- pub fn targets(&self) -> &[Target] { self.manifest.targets() }
+ pub fn targets(&self) -> &[Target] {
+ self.manifest.targets()
+ }
/// Get the current package version
- pub fn version(&self) -> &Version { self.package_id().version() }
+ pub fn version(&self) -> &Version {
+ self.package_id().version()
+ }
/// Get the package authors
- pub fn authors(&self) -> &Vec<String> { &self.manifest.metadata().authors }
+ pub fn authors(&self) -> &Vec<String> {
+ &self.manifest.metadata().authors
+ }
/// Whether the package is set to publish
- pub fn publish(&self) -> &Option<Vec<String>> { self.manifest.publish() }
+ pub fn publish(&self) -> &Option<Vec<String>> {
+ self.manifest.publish()
+ }
/// Whether the package uses a custom build script for any target
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
- pub fn find_closest_target(&self,
- target: &str,
- is_expected_kind: fn(&Target)-> bool) -> Option<&Target> {
+ pub fn find_closest_target(
+ &self,
+ target: &str,
+ is_expected_kind: fn(&Target) -> bool,
+ ) -> Option<&Target> {
let targets = self.targets();
- let matches = targets.iter().filter(|t| is_expected_kind(t))
- .map(|t| (lev_distance(target, t.name()), t))
- .filter(|&(d, _)| d < 4);
+ let matches = targets
+ .iter()
+ .filter(|t| is_expected_kind(t))
+ .map(|t| (lev_distance(target, t.name()), t))
+ .filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
- pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
- -> Package {
+ pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package {
Package {
manifest: self.manifest.map_source(to_replace, replace_with),
manifest_path: self.manifest_path,
pub fn to_registry_toml(&self, config: &Config) -> CargoResult<String> {
let manifest = self.manifest().original().prepare_for_publish(config)?;
let toml = toml::to_string(&manifest)?;
- Ok(format!("\
- # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\
- #\n\
- # When uploading crates to the registry Cargo will automatically\n\
- # \"normalize\" Cargo.toml files for maximal compatibility\n\
- # with all versions of Cargo and also rewrite `path` dependencies\n\
- # to registry (e.g. crates.io) dependencies\n\
- #\n\
- # If you believe there's an error in this file please file an\n\
- # issue against the rust-lang/cargo repository. If you're\n\
- # editing this file be aware that the upstream Cargo.toml\n\
- # will likely look very different (and much more reasonable)\n\
- \n\
- {}\
- ", toml))
+ Ok(format!(
+ "\
+ # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\
+ #\n\
+ # When uploading crates to the registry Cargo will automatically\n\
+ # \"normalize\" Cargo.toml files for maximal compatibility\n\
+ # with all versions of Cargo and also rewrite `path` dependencies\n\
+ # to registry (e.g. crates.io) dependencies\n\
+ #\n\
+ # If you believe there's an error in this file please file an\n\
+ # issue against the rust-lang/cargo repository. If you're\n\
+ # editing this file be aware that the upstream Cargo.toml\n\
+ # will likely look very different (and much more reasonable)\n\
+ \n\
+ {}\
+ ",
+ toml
+ ))
}
}
}
impl<'cfg> PackageSet<'cfg> {
- pub fn new(package_ids: &[PackageId],
- sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
+ pub fn new(package_ids: &[PackageId], sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
- packages: package_ids.iter().map(|id| {
- (id.clone(), LazyCell::new())
- }).collect(),
+ packages: package_ids
+ .iter()
+ .map(|id| (id.clone(), LazyCell::new()))
+ .collect(),
sources: RefCell::new(sources),
}
}
- pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
+ pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
Box::new(self.packages.keys())
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
- let slot = self.packages.get(id).ok_or_else(|| {
- internal(format!("couldn't find `{}` in package set", id))
- })?;
+ let slot = self.packages
+ .get(id)
+ .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() {
- return Ok(pkg)
+ return Ok(pkg);
}
let mut sources = self.sources.borrow_mut();
- let source = sources.get_mut(id.source_id()).ok_or_else(|| {
- internal(format!("couldn't find source for `{}`", id))
- })?;
- let pkg = source.download(id).chain_err(|| {
- format_err!("unable to get packages from source")
- })?;
+ let source = sources
+ .get_mut(id.source_id())
+ .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
+ let pkg = source
+ .download(id)
+ .chain_err(|| format_err!("unable to get packages from source"))?;
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
impl ser::Serialize for PackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer
+ where
+ S: ser::Serializer,
{
- s.collect_str(&format_args!("{} {} ({})",
- self.inner.name,
- self.inner.version,
- self.inner.source_id.to_url()))
+ s.collect_str(&format_args!(
+ "{} {} ({})",
+ self.inner.name,
+ self.inner.version,
+ self.inner.source_id.to_url()
+ ))
}
}
impl<'de> de::Deserialize<'de> for PackageId {
fn deserialize<D>(d: D) -> Result<PackageId, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
let string = String::deserialize(d)?;
let mut s = string.splitn(3, ' ');
Some(s) => s,
None => return Err(de::Error::custom("invalid serialized PackageId")),
};
- let version = semver::Version::parse(version)
- .map_err(de::Error::custom)?;
+ let version = semver::Version::parse(version).map_err(de::Error::custom)?;
let url = match s.next() {
Some(s) => s,
None => return Err(de::Error::custom("invalid serialized PackageId")),
let url = if url.starts_with('(') && url.ends_with(')') {
&url[1..url.len() - 1]
} else {
- return Err(de::Error::custom("invalid serialized PackageId"))
-
+ return Err(de::Error::custom("invalid serialized PackageId"));
};
let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;
}
impl PackageId {
- pub fn new<T: ToSemver>(name: &str, version: T,
- sid: &SourceId) -> CargoResult<PackageId> {
+ pub fn new<T: ToSemver>(name: &str, version: T, sid: &SourceId) -> CargoResult<PackageId> {
let v = version.to_semver()?;
Ok(PackageId {
inner: Arc::new(PackageIdInner {
})
}
- pub fn name(&self) -> InternedString { self.inner.name }
- pub fn version(&self) -> &semver::Version { &self.inner.version }
- pub fn source_id(&self) -> &SourceId { &self.inner.source_id }
+ pub fn name(&self) -> InternedString {
+ self.inner.name
+ }
+ pub fn version(&self) -> &semver::Version {
+ &self.inner.version
+ }
+ pub fn source_id(&self) -> &SourceId {
+ &self.inner.source_id
+ }
pub fn with_precise(&self, precise: Option<String>) -> PackageId {
PackageId {
impl fmt::Debug for PackageId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_struct("PackageId")
- .field("name", &self.inner.name)
- .field("version", &self.inner.version.to_string())
- .field("source", &self.inner.source_id.to_string())
- .finish()
+ .field("name", &self.inner.name)
+ .field("version", &self.inner.version.to_string())
+ .field("source", &self.inner.source_id.to_string())
+ .finish()
}
}
use url::Url;
use core::PackageId;
-use util::{ToUrl, ToSemver};
+use util::{ToSemver, ToUrl};
use util::errors::{CargoResult, CargoResultExt};
#[derive(Clone, PartialEq, Eq, Debug)]
}
pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId>
- where I: IntoIterator<Item=&'a PackageId>
+ where
+ I: IntoIterator<Item = &'a PackageId>,
{
- let spec = PackageIdSpec::parse(spec).chain_err(|| {
- format_err!("invalid package id specification: `{}`", spec)
- })?;
+ let spec = PackageIdSpec::parse(spec)
+ .chain_err(|| format_err!("invalid package id specification: `{}`", spec))?;
spec.query(i)
}
let frag = url.fragment().map(|s| s.to_owned());
url.set_fragment(None);
let (name, version) = {
- let mut path = url.path_segments().ok_or_else(|| {
- format_err!("pkgid urls must have a path: {}", url)
- })?;
+ let mut path = url.path_segments()
+ .ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?;
let path_name = path.next_back().ok_or_else(|| {
- format_err!("pkgid urls must have at least one path \
- component: {}", url)
+ format_err!(
+ "pkgid urls must have at least one path \
+ component: {}",
+ url
+ )
})?;
match frag {
Some(fragment) => {
(name_or_version.to_string(), Some(version))
}
None => {
- if name_or_version.chars().next().unwrap()
- .is_alphabetic() {
+ if name_or_version.chars().next().unwrap().is_alphabetic() {
(name_or_version.to_string(), None)
} else {
let version = name_or_version.to_semver()?;
})
}
- pub fn name(&self) -> &str { &self.name }
- pub fn version(&self) -> Option<&Version> { self.version.as_ref() }
- pub fn url(&self) -> Option<&Url> { self.url.as_ref() }
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+ pub fn version(&self) -> Option<&Version> {
+ self.version.as_ref()
+ }
+ pub fn url(&self) -> Option<&Url> {
+ self.url.as_ref()
+ }
pub fn set_url(&mut self, url: Url) {
self.url = Some(url);
}
pub fn matches(&self, package_id: &PackageId) -> bool {
- if self.name() != &*package_id.name() { return false }
+ if self.name() != &*package_id.name() {
+ return false;
+ }
if let Some(ref v) = self.version {
if v != package_id.version() {
match self.url {
Some(ref u) => u == package_id.source_id().url(),
- None => true
+ None => true,
}
}
pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId>
- where I: IntoIterator<Item=&'a PackageId>
+ where
+ I: IntoIterator<Item = &'a PackageId>,
{
let mut ids = i.into_iter().filter(|p| self.matches(*p));
let ret = match ids.next() {
Some(id) => id,
- None => bail!("package id specification `{}` \
- matched no packages", self),
+ None => bail!(
+ "package id specification `{}` \
+ matched no packages",
+ self
+ ),
};
return match ids.next() {
Some(other) => {
- let mut msg = format!("There are multiple `{}` packages in \
- your project, and the specification \
- `{}` is ambiguous.\n\
- Please re-run this command \
- with `-p <spec>` where `<spec>` is one \
- of the following:",
- self.name(), self);
+ let mut msg = format!(
+ "There are multiple `{}` packages in \
+ your project, and the specification \
+ `{}` is ambiguous.\n\
+ Please re-run this command \
+ with `-p <spec>` where `<spec>` is one \
+ of the following:",
+ self.name(),
+ self
+ );
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, &vec, self);
Err(format_err!("{}", msg))
}
- None => Ok(ret)
+ None => Ok(ret),
};
- fn minimize(msg: &mut String,
- ids: &[&PackageId],
- spec: &PackageIdSpec) {
+ fn minimize(msg: &mut String, ids: &[&PackageId], spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids {
*version_cnt.entry(id.version()).or_insert(0) += 1;
}
for id in ids {
if version_cnt[id.version()] == 1 {
- msg.push_str(&format!("\n {}:{}", spec.name(),
- id.version()));
+ msg.push_str(&format!("\n {}:{}", spec.name(), id.version()));
} else {
- msg.push_str(&format!("\n {}",
- PackageIdSpec::from_package_id(*id)));
+ msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id)));
}
}
}
write!(f, "#{}", self.name)?;
}
}
- None => { printed_name = true; write!(f, "{}", self.name)? }
+ None => {
+ printed_name = true;
+ write!(f, "{}", self.name)?
+ }
}
if let Some(ref v) = self.version {
- write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?;
+ write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?;
}
Ok(())
}
assert_eq!(parsed.to_string(), spec);
}
- ok("http://crates.io/foo#1.2.3", PackageIdSpec {
- name: "foo".to_string(),
- version: Some(Version::parse("1.2.3").unwrap()),
- url: Some(Url::parse("http://crates.io/foo").unwrap()),
- });
- ok("http://crates.io/foo#bar:1.2.3", PackageIdSpec {
- name: "bar".to_string(),
- version: Some(Version::parse("1.2.3").unwrap()),
- url: Some(Url::parse("http://crates.io/foo").unwrap()),
- });
- ok("crates.io/foo", PackageIdSpec {
- name: "foo".to_string(),
- version: None,
- url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
- });
- ok("crates.io/foo#1.2.3", PackageIdSpec {
- name: "foo".to_string(),
- version: Some(Version::parse("1.2.3").unwrap()),
- url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
- });
- ok("crates.io/foo#bar", PackageIdSpec {
- name: "bar".to_string(),
- version: None,
- url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
- });
- ok("crates.io/foo#bar:1.2.3", PackageIdSpec {
- name: "bar".to_string(),
- version: Some(Version::parse("1.2.3").unwrap()),
- url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
- });
- ok("foo", PackageIdSpec {
- name: "foo".to_string(),
- version: None,
- url: None,
- });
- ok("foo:1.2.3", PackageIdSpec {
- name: "foo".to_string(),
- version: Some(Version::parse("1.2.3").unwrap()),
- url: None,
- });
+ ok(
+ "http://crates.io/foo#1.2.3",
+ PackageIdSpec {
+ name: "foo".to_string(),
+ version: Some(Version::parse("1.2.3").unwrap()),
+ url: Some(Url::parse("http://crates.io/foo").unwrap()),
+ },
+ );
+ ok(
+ "http://crates.io/foo#bar:1.2.3",
+ PackageIdSpec {
+ name: "bar".to_string(),
+ version: Some(Version::parse("1.2.3").unwrap()),
+ url: Some(Url::parse("http://crates.io/foo").unwrap()),
+ },
+ );
+ ok(
+ "crates.io/foo",
+ PackageIdSpec {
+ name: "foo".to_string(),
+ version: None,
+ url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
+ },
+ );
+ ok(
+ "crates.io/foo#1.2.3",
+ PackageIdSpec {
+ name: "foo".to_string(),
+ version: Some(Version::parse("1.2.3").unwrap()),
+ url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
+ },
+ );
+ ok(
+ "crates.io/foo#bar",
+ PackageIdSpec {
+ name: "bar".to_string(),
+ version: None,
+ url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
+ },
+ );
+ ok(
+ "crates.io/foo#bar:1.2.3",
+ PackageIdSpec {
+ name: "bar".to_string(),
+ version: Some(Version::parse("1.2.3").unwrap()),
+ url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
+ },
+ );
+ ok(
+ "foo",
+ PackageIdSpec {
+ name: "foo".to_string(),
+ version: None,
+ url: None,
+ },
+ );
+ ok(
+ "foo:1.2.3",
+ PackageIdSpec {
+ name: "foo".to_string(),
+ version: Some(Version::parse("1.2.3").unwrap()),
+ url: None,
+ },
+ );
}
#[test]
let foo = PackageId::new("foo", "1.2.3", &sid).unwrap();
let bar = PackageId::new("bar", "1.2.3", &sid).unwrap();
- assert!( PackageIdSpec::parse("foo").unwrap().matches(&foo));
+ assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar));
- assert!( PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
+ assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo));
}
}
use semver::VersionReq;
use url::Url;
-use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId};
+use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary};
use core::PackageSet;
-use util::{Config, profile};
+use util::{profile, Config};
use util::errors::{CargoResult, CargoResultExt};
use sources::config::SourceConfigMap;
/// See also `core::Source`.
pub trait Registry {
/// Attempt to find the packages that match a dependency request.
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()>;
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()>;
fn query_vec(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
let mut ret = Vec::new();
}
impl<'a, T: ?Sized + Registry + 'a> Registry for Box<T> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).query(dep, f)
}
// slightly different precise version listed.
Some(&(_, Kind::Locked)) => {
debug!("load/locked {}", namespace);
- return Ok(())
+ return Ok(());
}
// If the previous source was not a precise source, then we can be
// sure that it's already been updated if we've already loaded it.
Some(&(ref previous, _)) if previous.precise().is_none() => {
debug!("load/precise {}", namespace);
- return Ok(())
+ return Ok(());
}
// If the previous source has the same precise version as we do,
Some(&(ref previous, _)) => {
if previous.precise() == namespace.precise() {
debug!("load/match {}", namespace);
- return Ok(())
+ return Ok(());
}
debug!("load/mismatch {}", namespace);
}
for dep in deps.iter() {
trace!("\t-> {}", dep);
}
- let sub_map = self.locked.entry(id.source_id().clone())
- .or_insert_with(HashMap::new);
- let sub_vec = sub_map.entry(id.name().to_string())
- .or_insert_with(Vec::new);
+ let sub_map = self.locked
+ .entry(id.source_id().clone())
+ .or_insert_with(HashMap::new);
+ let sub_vec = sub_map
+ .entry(id.name().to_string())
+ .or_insert_with(Vec::new);
sub_vec.push((id, deps));
}
// Remember that each dependency listed in `[patch]` has to resolve to
// precisely one package, so that's why we're just creating a flat list
// of summaries which should be the same length as `deps` above.
- let unlocked_summaries = deps.iter().map(|dep| {
- debug!("registring a patch for `{}` with `{}`",
- url,
- dep.name());
-
- // Go straight to the source for resolving `dep`. Load it as we
- // normally would and then ask it directly for the list of summaries
- // corresponding to this `dep`.
- self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| {
- format_err!("failed to load source for a dependency \
- on `{}`", dep.name())
- })?;
-
- let mut summaries = self.sources.get_mut(dep.source_id())
- .expect("loaded source not present")
- .query_vec(dep)?
- .into_iter();
-
- let summary = match summaries.next() {
- Some(summary) => summary,
- None => {
- bail!("patch for `{}` in `{}` did not resolve to any crates. If this is \
- unexpected, you may wish to consult: \
- https://github.com/rust-lang/cargo/issues/4678",
- dep.name(), url)
+ let unlocked_summaries = deps.iter()
+ .map(|dep| {
+            debug!("registering a patch for `{}` with `{}`", url, dep.name());
+
+ // Go straight to the source for resolving `dep`. Load it as we
+ // normally would and then ask it directly for the list of summaries
+ // corresponding to this `dep`.
+ self.ensure_loaded(dep.source_id(), Kind::Normal)
+ .chain_err(|| {
+ format_err!(
+ "failed to load source for a dependency \
+ on `{}`",
+ dep.name()
+ )
+ })?;
+
+ let mut summaries = self.sources
+ .get_mut(dep.source_id())
+ .expect("loaded source not present")
+ .query_vec(dep)?
+ .into_iter();
+
+ let summary = match summaries.next() {
+ Some(summary) => summary,
+ None => bail!(
+ "patch for `{}` in `{}` did not resolve to any crates. If this is \
+ unexpected, you may wish to consult: \
+ https://github.com/rust-lang/cargo/issues/4678",
+ dep.name(),
+ url
+ ),
+ };
+ if summaries.next().is_some() {
+ bail!(
+ "patch for `{}` in `{}` resolved to more than one candidate",
+ dep.name(),
+ url
+ )
}
- };
- if summaries.next().is_some() {
- bail!("patch for `{}` in `{}` resolved to more than one candidate",
- dep.name(), url)
- }
- if summary.package_id().source_id().url() == url {
- bail!("patch for `{}` in `{}` points to the same source, but \
- patches must point to different sources",
- dep.name(), url);
- }
- Ok(summary)
- }).collect::<CargoResult<Vec<_>>>().chain_err(|| {
- format_err!("failed to resolve patches for `{}`", url)
- })?;
+ if summary.package_id().source_id().url() == url {
+ bail!(
+ "patch for `{}` in `{}` points to the same source, but \
+ patches must point to different sources",
+ dep.name(),
+ url
+ );
+ }
+ Ok(summary)
+ })
+ .collect::<CargoResult<Vec<_>>>()
+ .chain_err(|| format_err!("failed to resolve patches for `{}`", url))?;
// Note that we do not use `lock` here to lock summaries! That step
// happens later once `lock_patches` is invoked. In the meantime though
// we want to fill in the `patches_available` map (later used in the
// `lock` method) and otherwise store the unlocked summaries in
// `patches` to get locked in a future call to `lock_patches`.
- let ids = unlocked_summaries.iter()
+ let ids = unlocked_summaries
+ .iter()
.map(|s| s.package_id())
.cloned()
.collect();
// Ensure the source has fetched all necessary remote data.
let _p = profile::start(format!("updating: {}", source_id));
self.sources.get_mut(source_id).unwrap().update()
- })().chain_err(|| format_err!("Unable to update {}", source_id))?;
+ })()
+ .chain_err(|| format_err!("Unable to update {}", source_id))?;
Ok(())
}
- fn query_overrides(&mut self, dep: &Dependency)
- -> CargoResult<Option<Summary>> {
+ fn query_overrides(&mut self, dep: &Dependency) -> CargoResult<Option<Summary>> {
for s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap();
let dep = Dependency::new_override(&*dep.name(), s);
let mut results = src.query_vec(&dep)?;
if !results.is_empty() {
- return Ok(Some(results.remove(0)))
+ return Ok(Some(results.remove(0)));
}
}
Ok(None)
lock(&self.locked, &self.patches_available, summary)
}
- fn warn_bad_override(&self,
- override_summary: &Summary,
- real_summary: &Summary) -> CargoResult<()> {
+ fn warn_bad_override(
+ &self,
+ override_summary: &Summary,
+ real_summary: &Summary,
+ ) -> CargoResult<()> {
let mut real_deps = real_summary.dependencies().iter().collect::<Vec<_>>();
let boilerplate = "\
for dep in override_summary.dependencies() {
if let Some(i) = real_deps.iter().position(|d| dep == *d) {
real_deps.remove(i);
- continue
+ continue;
}
- let msg = format!("\
- path override for crate `{}` has altered the original list of\n\
- dependencies; the dependency on `{}` was either added or\n\
- modified to not match the previously resolved version\n\n\
- {}", override_summary.package_id().name(), dep.name(), boilerplate);
+ let msg = format!(
+ "\
+ path override for crate `{}` has altered the original list of\n\
+ dependencies; the dependency on `{}` was either added or\n\
+ modified to not match the previously resolved version\n\n\
+ {}",
+ override_summary.package_id().name(),
+ dep.name(),
+ boilerplate
+ );
self.source_config.config().shell().warn(&msg)?;
- return Ok(())
+ return Ok(());
}
if let Some(id) = real_deps.get(0) {
- let msg = format!("\
+ let msg = format!(
+ "\
path override for crate `{}` has altered the original list of
dependencies; the dependency on `{}` was removed\n\n
- {}", override_summary.package_id().name(), id.name(), boilerplate);
+ {}",
+ override_summary.package_id().name(),
+ id.name(),
+ boilerplate
+ );
self.source_config.config().shell().warn(&msg)?;
- return Ok(())
+ return Ok(());
}
Ok(())
}
impl<'cfg> Registry for PackageRegistry<'cfg> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
assert!(self.patches_locked);
let (override_summary, n, to_warn) = {
// Look for an override and get ready to query the real source.
// what we really care about is the name/version match.
let mut patches = Vec::<Summary>::new();
if let Some(extra) = self.patches.get(dep.source_id().url()) {
- patches.extend(extra.iter().filter(|s| {
- dep.matches_ignoring_source(s)
- }).cloned());
+ patches.extend(
+ extra
+ .iter()
+ .filter(|s| dep.matches_ignoring_source(s))
+ .cloned(),
+ );
}
// A crucial feature of the `[patch]` feature is that we *don't*
Some(summary) => (summary, 1, Some(patch)),
None => {
f(patch);
- return Ok(())
+ return Ok(());
}
}
} else {
if !patches.is_empty() {
- debug!("found {} patches with an unlocked dep on `{}` at {} \
- with `{}`, \
- looking at sources", patches.len(),
- dep.name(),
- dep.source_id(),
- dep.version_req());
+ debug!(
+ "found {} patches with an unlocked dep on `{}` at {} \
+ with `{}`, \
+ looking at sources",
+ patches.len(),
+ dep.name(),
+ dep.source_id(),
+ dep.version_req()
+ );
}
// Ensure the requested source_id is loaded
- self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| {
- format_err!("failed to load source for a dependency \
- on `{}`", dep.name())
- })?;
+ self.ensure_loaded(dep.source_id(), Kind::Normal)
+ .chain_err(|| {
+ format_err!(
+ "failed to load source for a dependency \
+ on `{}`",
+ dep.name()
+ )
+ })?;
let source = self.sources.get_mut(dep.source_id());
match (override_summary, source) {
for patch in patches.iter() {
let patch = patch.package_id().version();
if summary.package_id().version() == patch {
- return
+ return;
}
}
f(lock(locked, all_patches, summary))
- })
+ });
}
// If we have an override summary then we query the source
}
}
-fn lock(locked: &LockedMap,
- patches: &HashMap<Url, Vec<PackageId>>,
- summary: Summary) -> Summary {
- let pair = locked.get(summary.source_id()).and_then(|map| {
- map.get(&*summary.name())
- }).and_then(|vec| {
- vec.iter().find(|&&(ref id, _)| id == summary.package_id())
- });
+fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Summary) -> Summary {
+ let pair = locked
+ .get(summary.source_id())
+ .and_then(|map| map.get(&*summary.name()))
+ .and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id()));
trace!("locking summary of {}", summary.package_id());
None => summary,
};
summary.map_dependencies(|dep| {
- trace!("\t{}/{}/{}", dep.name(), dep.version_req(),
- dep.source_id());
+ trace!("\t{}/{}/{}", dep.name(), dep.version_req(), dep.source_id());
// If we've got a known set of overrides for this summary, then
// one of a few cases can arise:
trace!("\tfirst hit on {}", locked);
let mut dep = dep.clone();
dep.lock_to(locked);
- return dep
+ return dep;
}
}
// If this dependency did not have a locked version, then we query
// all known locked packages to see if they match this dependency.
// If anything does then we lock it to that and move on.
- let v = locked.get(dep.source_id()).and_then(|map| {
- map.get(&*dep.name())
- }).and_then(|vec| {
- vec.iter().find(|&&(ref id, _)| dep.matches_id(id))
- });
+ let v = locked
+ .get(dep.source_id())
+ .and_then(|map| map.get(&*dep.name()))
+ .and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id)));
if let Some(&(ref id, _)) = v {
trace!("\tsecond hit on {}", id);
let mut dep = dep.clone();
dep.lock_to(id);
- return dep
+ return dep;
}
// Finally we check to see if any registered patches correspond to
let v = patches.get(dep.source_id().url()).map(|vec| {
let dep2 = dep.clone();
let mut iter = vec.iter().filter(move |p| {
- dep2.name() == p.name() &&
- dep2.version_req().matches(p.version())
+ dep2.name() == p.name() && dep2.version_req().matches(p.version())
});
(iter.next(), iter)
});
if let Some((Some(patch_id), mut remaining)) = v {
assert!(remaining.next().is_none());
let patch_source = patch_id.source_id();
- let patch_locked = locked.get(patch_source).and_then(|m| {
- m.get(&*patch_id.name())
- }).map(|list| {
- list.iter().any(|&(ref id, _)| id == patch_id)
- }).unwrap_or(false);
+ let patch_locked = locked
+ .get(patch_source)
+ .and_then(|m| m.get(&*patch_id.name()))
+ .map(|list| list.iter().any(|&(ref id, _)| id == patch_id))
+ .unwrap_or(false);
if patch_locked {
trace!("\tthird hit on {}", patch_id);
let req = VersionReq::exact(patch_id.version());
let mut dep = dep.clone();
dep.set_version_req(req);
- return dep
+ return dep;
}
}
#[cfg(test)]
pub mod test {
- use core::{Summary, Registry, Dependency};
+ use core::{Dependency, Registry, Summary};
use util::CargoResult;
pub struct RegistryBuilder {
summaries: Vec<Summary>,
- overrides: Vec<Summary>
+ overrides: Vec<Summary>,
}
impl RegistryBuilder {
pub fn new() -> RegistryBuilder {
- RegistryBuilder { summaries: vec![], overrides: vec![] }
+ RegistryBuilder {
+ summaries: vec![],
+ overrides: vec![],
+ }
}
pub fn summary(mut self, summary: Summary) -> RegistryBuilder {
}
fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
- self.overrides.iter()
+ self.overrides
+ .iter()
.filter(|s| s.name() == dep.name())
.map(|s| s.clone())
.collect()
}
impl Registry for RegistryBuilder {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
debug!("querying; dep={:?}", dep);
let overrides = self.query_overrides(dep);
-use std::collections::{HashMap, HashSet, BTreeMap};
+use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
use serde::ser;
use serde::de;
-use core::{Package, PackageId, SourceId, Workspace, Dependency};
-use util::{Graph, Config, internal};
-use util::errors::{CargoResult, CargoResultExt, CargoError};
+use core::{Dependency, Package, PackageId, SourceId, Workspace};
+use util::{internal, Config, Graph};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
use super::Resolve;
root: Option<EncodableDependency>,
metadata: Option<Metadata>,
- #[serde(default, skip_serializing_if = "Patch::is_empty")]
- patch: Patch,
+ #[serde(default, skip_serializing_if = "Patch::is_empty")] patch: Patch,
}
#[derive(Serialize, Deserialize, Debug, Default)]
};
if !all_pkgs.insert(enc_id.clone()) {
- return Err(internal(format!("package `{}` is specified twice in the lockfile",
- pkg.name)));
+ return Err(internal(format!(
+ "package `{}` is specified twice in the lockfile",
+ pkg.name
+ )));
}
let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
// We failed to find a local package in the workspace.
// It must have been removed and should be ignored.
None => {
- debug!("path dependency now missing {} v{}",
- pkg.name,
- pkg.version);
- continue
+ debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
+ continue;
}
- Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?
+ Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?,
};
assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
// no longer a member of the workspace.
Ok(None)
} else {
- Err(internal(format!("package `{}` is specified as a dependency, \
- but is missing from the package list", enc_id)))
- }
+ Err(internal(format!(
+ "package `{}` is specified as a dependency, \
+ but is missing from the package list",
+ enc_id
+ )))
+ },
}
};
for &(ref id, pkg) in live_pkgs.values() {
let deps = match pkg.dependencies {
Some(ref deps) => deps,
- None => continue
+ None => continue,
};
for edge in deps.iter() {
for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
to_remove.push(k.to_string());
let k = &k[prefix.len()..];
- let enc_id: EncodablePackageId = k.parse().chain_err(|| {
- internal("invalid encoding of checksum in lockfile")
- })?;
+ let enc_id: EncodablePackageId = k.parse()
+ .chain_err(|| internal("invalid encoding of checksum in lockfile"))?;
let id = match lookup_id(&enc_id) {
Ok(Some(id)) => id,
_ => continue,
// such as `cargo install` with a lock file from a remote dependency. In
// that case we don't need to fixup any path dependencies (as they're not
// actually path dependencies any more), so we ignore them.
- let members = ws.members().filter(|p| {
- p.package_id().source_id().is_path()
- }).collect::<Vec<_>>();
+ let members = ws.members()
+ .filter(|p| p.package_id().source_id().is_path())
+ .collect::<Vec<_>>();
let mut ret = HashMap::new();
let mut visited = HashSet::new();
for member in members.iter() {
- ret.insert(member.package_id().name().to_string(),
- member.package_id().source_id().clone());
+ ret.insert(
+ member.package_id().name().to_string(),
+ member.package_id().source_id().clone(),
+ );
visited.insert(member.package_id().source_id().clone());
}
for member in members.iter() {
build_pkg(member, ws.config(), &mut ret, &mut visited);
}
- for deps in ws.root_patch().values() {
+ for deps in ws.root_patch().values() {
for dep in deps {
build_dep(dep, ws.config(), &mut ret, &mut visited);
}
return ret;
- fn build_pkg(pkg: &Package,
- config: &Config,
- ret: &mut HashMap<String, SourceId>,
- visited: &mut HashSet<SourceId>) {
+ fn build_pkg(
+ pkg: &Package,
+ config: &Config,
+ ret: &mut HashMap<String, SourceId>,
+ visited: &mut HashSet<SourceId>,
+ ) {
for dep in pkg.dependencies() {
build_dep(dep, config, ret, visited);
}
}
- fn build_dep(dep: &Dependency,
- config: &Config,
- ret: &mut HashMap<String, SourceId>,
- visited: &mut HashSet<SourceId>) {
+ fn build_dep(
+ dep: &Dependency,
+ config: &Config,
+ ret: &mut HashMap<String, SourceId>,
+ visited: &mut HashSet<SourceId>,
+ ) {
let id = dep.source_id();
if visited.contains(id) || !id.is_path() {
- return
+ return;
}
let path = match id.url().to_file_path() {
Ok(p) => p.join("Cargo.toml"),
Ok(p) => p,
Err(_) => return,
};
- ret.insert(pkg.name().to_string(),
- pkg.package_id().source_id().clone());
+ ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone());
visited.insert(pkg.package_id().source_id().clone());
build_pkg(&pkg, config, ret, visited);
}
pub struct EncodablePackageId {
name: String,
version: String,
- source: Option<SourceId>
+ source: Option<SourceId>,
}
impl fmt::Display for EncodablePackageId {
fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
let mut s = s.splitn(3, ' ');
let name = s.next().unwrap();
- let version = s.next().ok_or_else(|| {
- internal("invalid serialized PackageId")
- })?;
+ let version = s.next()
+ .ok_or_else(|| internal("invalid serialized PackageId"))?;
let source_id = match s.next() {
Some(s) => {
if s.starts_with('(') && s.ends_with(')') {
Ok(EncodablePackageId {
name: name.to_string(),
version: version.to_string(),
- source: source_id
+ source: source_id,
})
}
}
impl ser::Serialize for EncodablePackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
s.collect_str(self)
}
impl<'de> de::Deserialize<'de> for EncodablePackageId {
fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
- where D: de::Deserializer<'de>,
+ where
+ D: de::Deserializer<'de>,
{
String::deserialize(d).and_then(|string| {
- string.parse::<EncodablePackageId>()
- .map_err(de::Error::custom)
+ string
+ .parse::<EncodablePackageId>()
+ .map_err(de::Error::custom)
})
}
}
impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect();
ids.sort();
- let encodable = ids.iter().filter_map(|&id| {
- Some(encodable_resolve_node(id, self.resolve))
- }).collect::<Vec<_>>();
+ let encodable = ids.iter()
+ .filter_map(|&id| Some(encodable_resolve_node(id, self.resolve)))
+ .collect::<Vec<_>>();
let mut metadata = self.resolve.metadata.clone();
None => "<none>",
};
let id = encodable_package_id(id);
- metadata.insert(format!("checksum {}", id.to_string()),
- checksum.to_string());
+ metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
}
- let metadata = if metadata.is_empty() { None } else { Some(metadata) };
+ let metadata = if metadata.is_empty() {
+ None
+ } else {
+ Some(metadata)
+ };
let patch = Patch {
- unused: self.resolve.unused_patches().iter().map(|id| {
- EncodableDependency {
+ unused: self.resolve
+ .unused_patches()
+ .iter()
+ .map(|id| EncodableDependency {
name: id.name().to_string(),
version: id.version().to_string(),
source: encode_source(id.source_id()),
dependencies: None,
replace: None,
- }
- }).collect(),
+ })
+ .collect(),
};
EncodableResolve {
package: Some(encodable),
}
}
-fn encodable_resolve_node(id: &PackageId, resolve: &Resolve)
- -> EncodableDependency {
+fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency {
let (replace, deps) = match resolve.replacement(id) {
- Some(id) => {
- (Some(encodable_package_id(id)), None)
- }
+ Some(id) => (Some(encodable_package_id(id)), None),
None => {
- let mut deps = resolve.graph.edges(id)
- .into_iter().flat_map(|a| a)
- .map(encodable_package_id)
- .collect::<Vec<_>>();
+ let mut deps = resolve
+ .graph
+ .edges(id)
+ .into_iter()
+ .flat_map(|a| a)
+ .map(encodable_package_id)
+ .collect::<Vec<_>>();
deps.sort();
(None, Some(deps))
}
//! over the place.
use std::cmp::Ordering;
-use std::collections::{HashSet, HashMap, BinaryHeap, BTreeMap};
+use std::collections::{BTreeMap, BinaryHeap, HashMap, HashSet};
use std::fmt;
use std::iter::FromIterator;
use std::ops::Range;
use std::rc::Rc;
-use std::time::{Instant, Duration};
+use std::time::{Duration, Instant};
use semver;
use url::Url;
-use core::{PackageId, Registry, SourceId, Summary, Dependency};
+use core::{Dependency, PackageId, Registry, SourceId, Summary};
use core::PackageIdSpec;
use core::interning::InternedString;
use util::config::Config;
use util::Graph;
-use util::errors::{CargoResult, CargoError};
+use util::errors::{CargoError, CargoResult};
use util::profile;
-use util::graph::{Nodes, Edges};
+use util::graph::{Edges, Nodes};
-pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId};
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
pub use self::encode::{Metadata, WorkspaceResolve};
mod encode;
impl<'r> Method<'r> {
pub fn split_features(features: &[String]) -> Vec<String> {
- features.iter()
+ features
+ .iter()
.flat_map(|s| s.split_whitespace())
.flat_map(|s| s.split(','))
.filter(|s| !s.is_empty())
pub fn path_to_top<'a>(&'a self, pkg: &'a PackageId) -> Vec<&'a PackageId> {
self.graph.path_to_top(pkg)
}
- pub fn register_used_patches(&mut self,
- patches: &HashMap<Url, Vec<Summary>>) {
+ pub fn register_used_patches(&mut self, patches: &HashMap<Url, Vec<Summary>>) {
for summary in patches.values().flat_map(|v| v) {
if self.iter().any(|id| id == summary.package_id()) {
- continue
+ continue;
}
self.unused_patches.push(summary.package_id().clone());
}
for (id, cksum) in previous.checksums.iter() {
if let Some(mine) = self.checksums.get(id) {
if mine == cksum {
- continue
+ continue;
}
// If the previous checksum wasn't calculated, the current
// desires stronger checksum guarantees than can be afforded
// elsewhere.
if cksum.is_none() {
- bail!("\
+ bail!(
+ "\
checksum for `{}` was not previously calculated, but a checksum could now \
be calculated
* newer Cargo implementations know how to checksum this source, but this
older implementation does not
* the lock file is corrupt
-", id, id.source_id())
+",
+ id,
+ id.source_id()
+ )
// If our checksum hasn't been calculated, then it could mean
// that future Cargo figured out how to checksum something or
// more realistically we were overridden with a source that does
// not have checksums.
} else if mine.is_none() {
- bail!("\
+ bail!(
+ "\
checksum for `{}` could not be calculated, but a checksum is listed in \
the existing lock file
* the lock file is corrupt
unable to verify that `{0}` is the same as when the lockfile was generated
-", id, id.source_id())
+",
+ id,
+ id.source_id()
+ )
// If the checksums aren't equal, and neither is None, then they
// must both be Some, in which case the checksum now differs.
// That's quite bad!
} else {
- bail!("\
+ bail!(
+ "\
checksum for `{}` changed between lock files
this could be indicative of a few possible errors:
* the source itself may be corrupt in one way or another
unable to verify that `{0}` is the same as when the lockfile was generated
-", id);
+",
+ id
+ );
}
}
}
}
pub fn deps(&self, pkg: &PackageId) -> Deps {
- Deps { edges: self.graph.edges(pkg), resolve: self }
+ Deps {
+ edges: self.graph.edges(pkg),
+ resolve: self,
+ }
}
pub fn deps_not_replaced(&self, pkg: &PackageId) -> DepsNotReplaced {
- DepsNotReplaced { edges: self.graph.edges(pkg) }
+ DepsNotReplaced {
+ edges: self.graph.edges(pkg),
+ }
}
pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> {
type Item = &'a PackageId;
fn next(&mut self) -> Option<&'a PackageId> {
- self.edges.as_mut()
+ self.edges
+ .as_mut()
.and_then(|e| e.next())
.map(|id| self.resolve.replacement(id).unwrap_or(id))
}
}
struct RcList<T> {
- head: Option<Rc<(T, RcList<T>)>>
+ head: Option<Rc<(T, RcList<T>)>>,
}
impl<T> RcList<T> {
}
fn push(&mut self, data: T) {
- let node = Rc::new((data, RcList { head: self.head.take() }));
+ let node = Rc::new((
+ data,
+ RcList {
+ head: self.head.take(),
+ },
+ ));
self.head = Some(node);
}
}
// Not derived to avoid `T: Clone`
impl<T> Clone for RcList<T> {
fn clone(&self) -> RcList<T> {
- RcList { head: self.head.clone() }
+ RcList {
+ head: self.head.clone(),
+ }
}
}
type Activations = HashMap<(InternedString, SourceId), Rc<Vec<Summary>>>;
/// Builds the list of all packages required to build the first argument.
-pub fn resolve(summaries: &[(Summary, Method)],
- replacements: &[(PackageIdSpec, Dependency)],
- registry: &mut Registry,
- config: Option<&Config>,
- print_warnings: bool) -> CargoResult<Resolve> {
+pub fn resolve(
+ summaries: &[(Summary, Method)],
+ replacements: &[(PackageIdSpec, Dependency)],
+ registry: &mut Registry,
+ config: Option<&Config>,
+ print_warnings: bool,
+) -> CargoResult<Resolve> {
let cx = Context {
resolve_graph: RcList::new(),
resolve_features: HashMap::new(),
warnings: RcList::new(),
};
let _p = profile::start("resolving");
- let cx = activate_deps_loop(cx, &mut RegistryQueryer::new(registry, replacements), summaries, config)?;
+ let cx = activate_deps_loop(
+ cx,
+ &mut RegistryQueryer::new(registry, replacements),
+ summaries,
+ config,
+ )?;
let mut resolve = Resolve {
graph: cx.graph(),
checksums: HashMap::new(),
metadata: BTreeMap::new(),
replacements: cx.resolve_replacements(),
- features: cx.resolve_features.iter().map(|(k, v)| {
- (k.clone(), v.iter().map(|x| x.to_string()).collect())
- }).collect(),
+ features: cx.resolve_features
+ .iter()
+ .map(|(k, v)| (k.clone(), v.iter().map(|x| x.to_string()).collect()))
+ .collect(),
unused_patches: Vec::new(),
};
- for summary in cx.activations.values()
- .flat_map(|v| v.iter()) {
+ for summary in cx.activations.values().flat_map(|v| v.iter()) {
let cksum = summary.checksum().map(|s| s.to_string());
- resolve.checksums.insert(summary.package_id().clone(), cksum);
+ resolve
+ .checksums
+ .insert(summary.package_id().clone(), cksum);
}
check_cycles(&resolve, &cx.activations)?;
/// the dependencies of the package will be determined by the `method` provided.
/// If `candidate` was activated, this function returns the dependency frame to
/// iterate through next.
-fn activate(cx: &mut Context,
- registry: &mut RegistryQueryer,
- parent: Option<&Summary>,
- candidate: Candidate,
- method: &Method)
- -> ActivateResult<Option<(DepsFrame, Duration)>> {
+fn activate(
+ cx: &mut Context,
+ registry: &mut RegistryQueryer,
+ parent: Option<&Summary>,
+ candidate: Candidate,
+ method: &Method,
+) -> ActivateResult<Option<(DepsFrame, Duration)>> {
if let Some(parent) = parent {
- cx.resolve_graph.push(GraphNode::Link(parent.package_id().clone(),
- candidate.summary.package_id().clone()));
+ cx.resolve_graph.push(GraphNode::Link(
+ parent.package_id().clone(),
+ candidate.summary.package_id().clone(),
+ ));
}
let activated = cx.flag_activated(&candidate.summary, method)?;
let candidate = match candidate.replace {
Some(replace) => {
- cx.resolve_replacements.push((candidate.summary.package_id().clone(),
- replace.package_id().clone()));
+ cx.resolve_replacements.push((
+ candidate.summary.package_id().clone(),
+ replace.package_id().clone(),
+ ));
if cx.flag_activated(&replace, method)? && activated {
return Ok(None);
}
- trace!("activating {} (replacing {})", replace.package_id(),
- candidate.summary.package_id());
+ trace!(
+ "activating {} (replacing {})",
+ replace.package_id(),
+ candidate.summary.package_id()
+ );
replace
}
None => {
if activated {
- return Ok(None)
+ return Ok(None);
}
trace!("activating {}", candidate.summary.package_id());
candidate.summary
}
}
-impl<T> Iterator for RcVecIter<T> where T: Clone {
+impl<T> Iterator for RcVecIter<T>
+where
+ T: Clone,
+{
type Item = (usize, T);
fn next(&mut self) -> Option<(usize, T)> {
- self.rest.next().and_then(|i| {
- self.vec.get(i).map(|val| (i, val.clone()))
- })
+ self.rest
+ .next()
+ .and_then(|i| self.vec.get(i).map(|val| (i, val.clone())))
}
fn size_hint(&self) -> (usize, Option<usize>) {
/// number of candidates at the front, so we just return the number of
/// candidates in that entry.
fn min_candidates(&self) -> usize {
- self.remaining_siblings.clone().next().map(|(_, (_, candidates, _))| {
- candidates.len()
- }).unwrap_or(0)
+ self.remaining_siblings
+ .clone()
+ .next()
+ .map(|(_, (_, candidates, _))| candidates.len())
+ .unwrap_or(0)
}
}
impl PartialEq for DepsFrame {
fn eq(&self, other: &DepsFrame) -> bool {
- self.just_for_error_messages == other.just_for_error_messages &&
- self.min_candidates() == other.min_candidates()
+ self.just_for_error_messages == other.just_for_error_messages
+ && self.min_candidates() == other.min_candidates()
}
}
impl Ord for DepsFrame {
fn cmp(&self, other: &DepsFrame) -> Ordering {
- self.just_for_error_messages.cmp(&other.just_for_error_messages).then_with(||
+ self.just_for_error_messages
+ .cmp(&other.just_for_error_messages)
+ .then_with(||
// the frame with the sibling that has the least number of candidates
// needs to get bubbled up to the top of the heap we use below, so
// reverse comparison here.
- self.min_candidates().cmp(&other.min_candidates()).reverse()
- )
+ self.min_candidates().cmp(&other.min_candidates()).reverse())
}
}
}
impl<'a> RegistryQueryer<'a> {
- fn new(registry: &'a mut Registry, replacements: &'a [(PackageIdSpec, Dependency)],) -> Self {
+ fn new(registry: &'a mut Registry, replacements: &'a [(PackageIdSpec, Dependency)]) -> Self {
RegistryQueryer {
registry,
replacements,
let mut ret = Vec::new();
self.registry.query(dep, &mut |s| {
- ret.push(Candidate { summary: s, replace: None });
+ ret.push(Candidate {
+ summary: s,
+ replace: None,
+ });
})?;
for candidate in ret.iter_mut() {
let summary = &candidate.summary;
- let mut potential_matches = self.replacements.iter()
+ let mut potential_matches = self.replacements
+ .iter()
.filter(|&&(ref spec, _)| spec.matches(summary.package_id()));
let &(ref spec, ref dep) = match potential_matches.next() {
let mut summaries = self.registry.query_vec(dep)?.into_iter();
let s = summaries.next().ok_or_else(|| {
- format_err!("no matching package for override `{}` found\n\
- location searched: {}\n\
- version required: {}",
- spec, dep.source_id(), dep.version_req())
+ format_err!(
+ "no matching package for override `{}` found\n\
+ location searched: {}\n\
+ version required: {}",
+ spec,
+ dep.source_id(),
+ dep.version_req()
+ )
})?;
let summaries = summaries.collect::<Vec<_>>();
if !summaries.is_empty() {
- let bullets = summaries.iter().map(|s| {
- format!(" * {}", s.package_id())
- }).collect::<Vec<_>>();
- bail!("the replacement specification `{}` matched \
- multiple packages:\n * {}\n{}", spec, s.package_id(),
- bullets.join("\n"));
+ let bullets = summaries
+ .iter()
+ .map(|s| format!(" * {}", s.package_id()))
+ .collect::<Vec<_>>();
+ bail!(
+ "the replacement specification `{}` matched \
+ multiple packages:\n * {}\n{}",
+ spec,
+ s.package_id(),
+ bullets.join("\n")
+ );
}
// The dependency should be hard-coded to have the same name and an
// Make sure no duplicates
if let Some(&(ref spec, _)) = potential_matches.next() {
- bail!("overlapping replacement specifications found:\n\n \
- * {}\n * {}\n\nboth specifications match: {}",
- matched_spec, spec, summary.package_id());
+ bail!(
+ "overlapping replacement specifications found:\n\n \
+ * {}\n * {}\n\nboth specifications match: {}",
+ matched_spec,
+ spec,
+ summary.package_id()
+ );
}
for dep in summary.dependencies() {
// When we attempt versions for a package, we'll want to start at
// the maximum version and work our way down.
- ret.sort_unstable_by(|a, b| {
- b.summary.version().cmp(a.summary.version())
- });
+ ret.sort_unstable_by(|a, b| b.summary.version().cmp(a.summary.version()));
let out = Rc::new(ret);
// `foo=0.8.1` AND `foo=0.9.4` are activated" (better data structures are welcome but this works for now.)
// This is used to make sure we don't queue work we know will fail.
// See the discussion in https://github.com/rust-lang/cargo/pull/5168 for why this is so important
- let mut past_conflicting_activations: HashMap<Dependency, Vec<HashMap<PackageId, ConflictReason>>> = HashMap::new();
+ let mut past_conflicting_activations: HashMap<
+ Dependency,
+ Vec<HashMap<PackageId, ConflictReason>>,
+ > = HashMap::new();
for &(ref summary, ref method) in summaries {
debug!("initial activation: {}", summary.package_id());
let candidate = Candidate {
Ok(Some((frame, _))) => remaining_deps.push(frame),
Ok(None) => (),
Err(ActivateError::Error(e)) => return Err(e),
- Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate")
+ Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"),
}
}
let (mut parent, (mut cur, (mut dep, candidates, mut features))) = frame;
assert!(!remaining_deps.is_empty());
- trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(), candidates.len());
- trace!("{}[{}]>{} {} prev activations", parent.name(), cur, dep.name(), cx.prev_active(&dep).len());
+ trace!(
+ "{}[{}]>{} {} candidates",
+ parent.name(),
+ cur,
+ dep.name(),
+ candidates.len()
+ );
+ trace!(
+ "{}[{}]>{} {} prev activations",
+ parent.name(),
+ cur,
+ dep.name(),
+ cx.prev_active(&dep).len()
+ );
let just_here_for_the_error_messages = just_here_for_the_error_messages
&& past_conflicting_activations
if !just_here_for_the_error_messages && !backtracked {
// if `just_here_for_the_error_messages` then skip as it is already known to be bad.
// if `backtracked` then `conflicting_activations` may not be complete so skip.
- let past = past_conflicting_activations.entry(dep.clone()).or_insert_with(Vec::new);
+ let past = past_conflicting_activations
+ .entry(dep.clone())
+ .or_insert_with(Vec::new);
if !past.contains(&conflicting_activations) {
- trace!("{}[{}]>{} adding a skip {:?}", parent.name(), cur, dep.name(), conflicting_activations);
+ trace!(
+ "{}[{}]>{} adding a skip {:?}",
+ parent.name(),
+ cur,
+ dep.name(),
+ conflicting_activations
+ );
past.push(conflicting_activations.clone());
}
}
- find_candidate(
- &mut backtrack_stack,
- &parent,
- &conflicting_activations,
- ).map(|(candidate, has_another, frame)| {
- // This resets the `remaining_deps` to
- // their state at the found level of the `backtrack_stack`.
- cur = frame.cur;
- cx = frame.context_backup;
- remaining_deps = frame.deps_backup;
- remaining_candidates = frame.remaining_candidates;
- parent = frame.parent;
- dep = frame.dep;
- features = frame.features;
- conflicting_activations = frame.conflicting_activations;
- backtracked = true;
- (candidate, has_another)
- }).ok_or_else(|| {
- activation_error(
- &cx,
- registry.registry,
- &parent,
- &dep,
- &conflicting_activations,
- &candidates,
- config,
- )
- })
+ find_candidate(&mut backtrack_stack, &parent, &conflicting_activations)
+ .map(|(candidate, has_another, frame)| {
+ // This resets the `remaining_deps` to
+ // their state at the found level of the `backtrack_stack`.
+ cur = frame.cur;
+ cx = frame.context_backup;
+ remaining_deps = frame.deps_backup;
+ remaining_candidates = frame.remaining_candidates;
+ parent = frame.parent;
+ dep = frame.dep;
+ features = frame.features;
+ conflicting_activations = frame.conflicting_activations;
+ backtracked = true;
+ (candidate, has_another)
+ })
+ .ok_or_else(|| {
+ activation_error(
+ &cx,
+ registry.registry,
+ &parent,
+ &dep,
+ &conflicting_activations,
+ &candidates,
+ config,
+ )
+ })
})?;
if just_here_for_the_error_messages && !backtracked && has_another {
- continue
+ continue;
}
// We have a candidate. Clone a `BacktrackFrame`
all_features: false,
uses_default_features: dep.uses_default_features(),
};
- trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(), candidate.summary.version());
+ trace!(
+ "{}[{}]>{} trying {}",
+ parent.name(),
+ cur,
+ dep.name(),
+ candidate.summary.version()
+ );
let res = activate(&mut cx, registry, Some(&parent), candidate, &method);
successfully_activated = res.is_ok();
// but we may want to scrap it if it is not going to end well
let mut has_past_conflicting_dep = just_here_for_the_error_messages;
if !has_past_conflicting_dep {
- if let Some(conflicting) = frame.remaining_siblings.clone().filter_map(|(_, (deb, _, _))| {
- past_conflicting_activations.get(&deb).and_then(|past_bad| {
- // for each dependency check all of its cashed conflicts
- past_bad.iter().find(|conflicting| {
- conflicting
+ if let Some(conflicting) = frame
+ .remaining_siblings
+ .clone()
+ .filter_map(|(_, (deb, _, _))| {
+ past_conflicting_activations.get(&deb).and_then(|past_bad| {
+                        // for each dependency check all of its cached conflicts
+ past_bad.iter().find(|conflicting| {
+ conflicting
.iter()
                                // note: a lot of redundant work in is_active for similar deps
.all(|(con, _)| cx.is_active(con))
+ })
})
})
- }).next() {
+ .next()
+ {
                // if any of them match then it will just backtrack to us
// so let's save the effort.
conflicting_activations.extend(conflicting.clone());
// we have not activated ANY candidates and
// we are out of choices so add it to the cache
// so our parent will know that we don't work
- let past = past_conflicting_activations.entry(dep.clone()).or_insert_with(Vec::new);
+ let past = past_conflicting_activations
+ .entry(dep.clone())
+ .or_insert_with(Vec::new);
if !past.contains(&conflicting_activations) {
- trace!("{}[{}]>{} adding a meta-skip {:?}", parent.name(), cur, dep.name(), conflicting_activations);
+ trace!(
+ "{}[{}]>{} adding a meta-skip {:?}",
+ parent.name(),
+ cur,
+ dep.name(),
+ conflicting_activations
+ );
past.push(conflicting_activations.clone());
}
}
                // if not has_another we activate for the better error messages
frame.just_for_error_messages = has_past_conflicting_dep;
- if !has_past_conflicting_dep || (!has_another && (just_here_for_the_error_messages || find_candidate(
- &mut backtrack_stack.clone(),
- &parent,
- &conflicting_activations,
- ).is_none())) {
+ if !has_past_conflicting_dep
+ || (!has_another
+ && (just_here_for_the_error_messages
+ || find_candidate(
+ &mut backtrack_stack.clone(),
+ &parent,
+ &conflicting_activations,
+ ).is_none()))
+ {
remaining_deps.push(frame);
} else {
- trace!("{}[{}]>{} skipping {} ", parent.name(), cur, dep.name(), pid.version());
+ trace!(
+ "{}[{}]>{} skipping {} ",
+ parent.name(),
+ cur,
+ dep.name(),
+ pid.version()
+ );
successfully_activated = false;
}
deps_time += dur;
}
Ok(None) => (),
Err(ActivateError::Error(e)) => return Err(e),
- Err(ActivateError::Conflict(id, reason)) => { conflicting_activations.insert(id, reason); },
+ Err(ActivateError::Conflict(id, reason)) => {
+ conflicting_activations.insert(id, reason);
+ }
}
// Add an entry to the `backtrack_stack` so
conflicting_activations: &HashMap<PackageId, ConflictReason>,
) -> Option<(Candidate, bool, BacktrackFrame)> {
while let Some(mut frame) = backtrack_stack.pop() {
- let next= frame.remaining_candidates.next(frame.context_backup.prev_active(&frame.dep), &frame.context_backup.links);
+ let next = frame.remaining_candidates.next(
+ frame.context_backup.prev_active(&frame.dep),
+ &frame.context_backup.links,
+ );
if frame.context_backup.is_active(parent.package_id())
- && conflicting_activations
+ && conflicting_activations
.iter()
// note: a lot of redundant work in is_active for similar deps
.all(|(con, _)| frame.context_backup.is_active(con))
let dep_path = graph.path_to_top(pkgid);
let mut dep_path_desc = format!("package `{}`", dep_path[0]);
for dep in dep_path.iter().skip(1) {
- write!(dep_path_desc,
- "\n ... which is depended on by `{}`",
- dep).unwrap();
+ write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
}
dep_path_desc
}
-fn activation_error(cx: &Context,
- registry: &mut Registry,
- parent: &Summary,
- dep: &Dependency,
- conflicting_activations: &HashMap<PackageId, ConflictReason>,
- candidates: &[Candidate],
- config: Option<&Config>) -> CargoError {
+fn activation_error(
+ cx: &Context,
+ registry: &mut Registry,
+ parent: &Summary,
+ dep: &Dependency,
+ conflicting_activations: &HashMap<PackageId, ConflictReason>,
+ candidates: &[Candidate],
+ config: Option<&Config>,
+) -> CargoError {
let graph = cx.graph();
if !candidates.is_empty() {
let mut msg = format!("failed to select a version for `{}`.", dep.name());
msg.push_str("\nversions that meet the requirements `");
msg.push_str(&dep.version_req().to_string());
msg.push_str("` are: ");
- msg.push_str(&candidates.iter()
- .map(|v| v.summary.version())
- .map(|v| v.to_string())
- .collect::<Vec<_>>()
- .join(", "));
+ msg.push_str(&candidates
+ .iter()
+ .map(|v| v.summary.version())
+ .map(|v| v.to_string())
+ .collect::<Vec<_>>()
+ .join(", "));
let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect();
conflicting_activations.sort_unstable();
- let (links_errors, mut other_errors): (Vec<_>, Vec<_>) = conflicting_activations.drain(..).rev().partition(|&(_, r)| r.is_links());
+ let (links_errors, mut other_errors): (Vec<_>, Vec<_>) = conflicting_activations
+ .drain(..)
+ .rev()
+ .partition(|&(_, r)| r.is_links());
for &(p, r) in links_errors.iter() {
if let ConflictReason::Links(ref link) = *r {
msg.push_str(&describe_path(&graph, p));
}
- let (features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors.drain(..).partition(|&(_, r)| r.is_missing_features());
+ let (features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors
+ .drain(..)
+ .partition(|&(_, r)| r.is_missing_features());
for &(p, r) in features_errors.iter() {
if let ConflictReason::MissingFeatures(ref features) = *r {
}
if !other_errors.is_empty() {
- msg.push_str("\n\nall possible versions conflict with \
- previously selected packages.");
+ msg.push_str(
+ "\n\nall possible versions conflict with \
+ previously selected packages.",
+ );
}
for &(p, _) in other_errors.iter() {
msg.push_str(&*dep.name());
msg.push_str("` which could resolve this conflict");
- return format_err!("{}", msg)
+ return format_err!("{}", msg);
}
// Once we're all the way down here, we're definitely lost in the
Ok(candidates) => candidates,
Err(e) => return e,
};
- candidates.sort_unstable_by(|a, b| {
- b.version().cmp(a.version())
- });
+ candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
let mut msg = if !candidates.is_empty() {
let versions = {
- let mut versions = candidates.iter().take(3).map(|cand| {
- cand.version().to_string()
- }).collect::<Vec<_>>();
+ let mut versions = candidates
+ .iter()
+ .take(3)
+ .map(|cand| cand.version().to_string())
+ .collect::<Vec<_>>();
if candidates.len() > 3 {
versions.push("...".into());
versions.join(", ")
};
- let mut msg = format!("no matching version `{}` found for package `{}`\n\
- location searched: {}\n\
- versions found: {}\n",
- dep.version_req(),
- dep.name(),
- dep.source_id(),
- versions);
+ let mut msg = format!(
+ "no matching version `{}` found for package `{}`\n\
+ location searched: {}\n\
+ versions found: {}\n",
+ dep.version_req(),
+ dep.name(),
+ dep.source_id(),
+ versions
+ );
msg.push_str("required by ");
msg.push_str(&describe_path(&graph, parent.package_id()));
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
- if dep.source_id().is_path()
- && dep.version_req().to_string().starts_with('=') {
- msg.push_str("\nconsider running `cargo update` to update \
- a path dependency's locked version");
+ if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') {
+ msg.push_str(
+ "\nconsider running `cargo update` to update \
+ a path dependency's locked version",
+ );
}
msg
} else {
- let mut msg = format!("no matching package named `{}` found\n\
- location searched: {}\n",
- dep.name(), dep.source_id());
+ let mut msg = format!(
+ "no matching package named `{}` found\n\
+ location searched: {}\n",
+ dep.name(),
+ dep.source_id()
+ );
msg.push_str("required by ");
msg.push_str(&describe_path(&graph, parent.package_id()));
if let Some(config) = config {
if config.cli_unstable().offline {
- msg.push_str("\nAs a reminder, you're using offline mode (-Z offline) \
- which can sometimes cause surprising resolution failures, \
- if this error is too confusing you may with to retry \
- without the offline flag.");
+ msg.push_str(
+ "\nAs a reminder, you're using offline mode (-Z offline) \
+ which can sometimes cause surprising resolution failures, \
+                 if this error is too confusing you may wish to retry \
+ without the offline flag.",
+ );
}
}
// Versions `a` and `b` are compatible if their left-most nonzero digit is the
// same.
fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
- if a.major != b.major { return false }
- if a.major != 0 { return true }
- if a.minor != b.minor { return false }
- if a.minor != 0 { return true }
+ if a.major != b.major {
+ return false;
+ }
+ if a.major != 0 {
+ return true;
+ }
+ if a.minor != b.minor {
+ return false;
+ }
+ if a.minor != 0 {
+ return true;
+ }
a.patch == b.patch
}
fn require_crate_feature(&mut self, package: &'r str, feat: &'r str) {
self.used.insert(package);
- self.deps.entry(package)
+ self.deps
+ .entry(package)
.or_insert((false, Vec::new()))
- .1.push(feat.to_string());
+ .1
+ .push(feat.to_string());
}
fn seen(&mut self, feat: &'r str) -> bool {
if self.seen(feat) {
return Ok(());
}
- for f in self.summary.features().get(feat).expect("must be a valid feature") {
+ for f in self.summary
+ .features()
+ .get(feat)
+ .expect("must be a valid feature")
+ {
if f == feat {
- bail!("Cyclic feature dependency: feature `{}` depends on itself", feat);
+ bail!(
+ "Cyclic feature dependency: feature `{}` depends on itself",
+ feat
+ );
}
self.add_feature(f)?;
}
}
fn add_feature(&mut self, feat: &'r str) -> CargoResult<()> {
- if feat.is_empty() { return Ok(()) }
+ if feat.is_empty() {
+ return Ok(());
+ }
// If this feature is of the form `foo/bar`, then we just lookup package
// `foo` and enable its feature `bar`. Otherwise this feature is of the
/// Takes requested features for a single package from the input Method and
/// recurses to find all requested features, dependencies and requested
/// dependency features in a Requirements object, returning it to the resolver.
-fn build_requirements<'a, 'b: 'a>(s: &'a Summary, method: &'b Method)
- -> CargoResult<Requirements<'a>> {
+fn build_requirements<'a, 'b: 'a>(
+ s: &'a Summary,
+ method: &'b Method,
+) -> CargoResult<Requirements<'a>> {
let mut reqs = Requirements::new(s);
match *method {
- Method::Everything |
- Method::Required { all_features: true, .. } => {
+ Method::Everything
+ | Method::Required {
+ all_features: true, ..
+ } => {
for key in s.features().keys() {
reqs.require_feature(key)?;
}
reqs.require_dependency(dep.name().to_inner());
}
}
- Method::Required { features: requested_features, .. } => {
- for feat in requested_features.iter() {
- reqs.add_feature(feat)?;
- }
- }
+ Method::Required {
+ features: requested_features,
+ ..
+ } => for feat in requested_features.iter() {
+ reqs.add_feature(feat)?;
+ },
}
match *method {
- Method::Everything |
- Method::Required { uses_default_features: true, .. } => {
+ Method::Everything
+ | Method::Required {
+ uses_default_features: true,
+ ..
+ } => {
if s.features().get("default").is_some() {
reqs.require_feature("default")?;
}
}
- Method::Required { uses_default_features: false, .. } => {}
+ Method::Required {
+ uses_default_features: false,
+ ..
+ } => {}
}
Ok(reqs)
}
/// Activate this summary by inserting it into our list of known activations.
///
/// Returns true if this summary with the given method is already activated.
- fn flag_activated(&mut self,
- summary: &Summary,
- method: &Method) -> CargoResult<bool> {
+ fn flag_activated(&mut self, summary: &Summary, method: &Method) -> CargoResult<bool> {
let id = summary.package_id();
let prev = self.activations
- .entry((id.name(), id.source_id().clone()))
- .or_insert_with(||Rc::new(Vec::new()));
+ .entry((id.name(), id.source_id().clone()))
+ .or_insert_with(|| Rc::new(Vec::new()));
if !prev.iter().any(|c| c == summary) {
self.resolve_graph.push(GraphNode::Add(id.clone()));
if let Some(link) = summary.links() {
- ensure!(self.links.insert(link, id.clone()).is_none(),
- "Attempting to resolve a with more then one crate with the links={}. \n\
- This will not build as is. Consider rebuilding the .lock file.", &*link);
+ ensure!(
+ self.links.insert(link, id.clone()).is_none(),
+                "Attempting to resolve a dependency with more than one crate with the links={}. \n\
+ This will not build as is. Consider rebuilding the .lock file.",
+ &*link
+ );
}
let mut inner: Vec<_> = (**prev).clone();
inner.push(summary.clone());
*prev = Rc::new(inner);
- return Ok(false)
+ return Ok(false);
}
debug!("checking if {} is already activated", summary.package_id());
let (features, use_default) = match *method {
- Method::Everything |
- Method::Required { all_features: true, .. } => return Ok(false),
- Method::Required { features, uses_default_features, .. } => {
- (features, uses_default_features)
- }
+ Method::Everything
+ | Method::Required {
+ all_features: true, ..
+ } => return Ok(false),
+ Method::Required {
+ features,
+ uses_default_features,
+ ..
+ } => (features, uses_default_features),
};
let has_default_feature = summary.features().contains_key("default");
Ok(match self.resolve_features.get(id) {
Some(prev) => {
- features.iter().all(|f| prev.contains(&InternedString::new(f))) &&
- (!use_default || prev.contains(&InternedString::new("default")) ||
- !has_default_feature)
+ features
+ .iter()
+ .all(|f| prev.contains(&InternedString::new(f)))
+ && (!use_default || prev.contains(&InternedString::new("default"))
+ || !has_default_feature)
}
- None => features.is_empty() && (!use_default || !has_default_feature)
+ None => features.is_empty() && (!use_default || !has_default_feature),
})
}
- fn build_deps(&mut self,
- registry: &mut RegistryQueryer,
- parent: Option<&Summary>,
- candidate: &Summary,
- method: &Method) -> ActivateResult<Vec<DepInfo>> {
+ fn build_deps(
+ &mut self,
+ registry: &mut RegistryQueryer,
+ parent: Option<&Summary>,
+ candidate: &Summary,
+ method: &Method,
+ ) -> ActivateResult<Vec<DepInfo>> {
// First, figure out our set of dependencies based on the requested set
// of features. This also calculates what features we're going to enable
// for our own dependencies.
- let deps = self.resolve_features(parent,candidate, method)?;
+ let deps = self.resolve_features(parent, candidate, method)?;
// Next, transform all dependencies into a list of possible candidates
// which can satisfy that dependency.
- let mut deps = deps.into_iter().map(|(dep, features)| {
- let candidates = registry.query(&dep)?;
- Ok((dep, candidates, Rc::new(features)))
- }).collect::<CargoResult<Vec<DepInfo>>>()?;
+ let mut deps = deps.into_iter()
+ .map(|(dep, features)| {
+ let candidates = registry.query(&dep)?;
+ Ok((dep, candidates, Rc::new(features)))
+ })
+ .collect::<CargoResult<Vec<DepInfo>>>()?;
// Attempt to resolve dependencies with fewer candidates before trying
// dependencies with more candidates. This way if the dependency with
}
fn prev_active(&self, dep: &Dependency) -> &[Summary] {
- self.activations.get(&(dep.name(), dep.source_id().clone()))
+ self.activations
+ .get(&(dep.name(), dep.source_id().clone()))
.map(|v| &v[..])
.unwrap_or(&[])
}
fn is_active(&self, id: &PackageId) -> bool {
- self.activations.get(&(id.name(), id.source_id().clone()))
+ self.activations
+ .get(&(id.name(), id.source_id().clone()))
.map(|v| v.iter().any(|s| s.package_id() == id))
.unwrap_or(false)
}
/// Return all dependencies and the features we want from them.
- fn resolve_features<'b>(&mut self,
- parent: Option<&Summary>,
- s: &'b Summary,
- method: &'b Method)
- -> ActivateResult<Vec<(Dependency, Vec<String>)>> {
+ fn resolve_features<'b>(
+ &mut self,
+ parent: Option<&Summary>,
+ s: &'b Summary,
+ method: &'b Method,
+ ) -> ActivateResult<Vec<(Dependency, Vec<String>)>> {
let dev_deps = match *method {
Method::Everything => true,
Method::Required { dev_deps, .. } => dev_deps,
for dep in deps {
// Skip optional dependencies, but not those enabled through a feature
if dep.is_optional() && !reqs.deps.contains_key(&*dep.name()) {
- continue
+ continue;
}
// So we want this dependency. Move the features we want from `feature_deps`
// to `ret`.
let base = reqs.deps.remove(&*dep.name()).unwrap_or((false, vec![]));
if !dep.is_optional() && base.0 {
- self.warnings.push(
- format!("Package `{}` does not have feature `{}`. It has a required dependency \
- with that name, but only optional dependencies can be used as features. \
- This is currently a warning to ease the transition, but it will become an \
- error in the future.",
- s.package_id(), dep.name())
- );
+ self.warnings.push(format!(
+ "Package `{}` does not have feature `{}`. It has a required dependency \
+ with that name, but only optional dependencies can be used as features. \
+ This is currently a warning to ease the transition, but it will become an \
+ error in the future.",
+ s.package_id(),
+ dep.name()
+ ));
}
let mut base = base.1;
base.extend(dep.features().iter().cloned());
for feature in base.iter() {
if feature.contains('/') {
- return Err(format_err!("feature names may not contain slashes: `{}`", feature).into());
+ return Err(
+ format_err!("feature names may not contain slashes: `{}`", feature).into(),
+ );
}
}
ret.push((dep.clone(), base));
// have those dependencies. We classified them as dependencies in the first place
// because there is no such feature, either.
if !reqs.deps.is_empty() {
- let unknown = reqs.deps.keys()
- .map(|s| &s[..])
- .collect::<Vec<&str>>();
+ let unknown = reqs.deps.keys().map(|s| &s[..]).collect::<Vec<&str>>();
let features = unknown.join(", ");
return Err(match parent {
- None => format_err!("Package `{}` does not have these features: `{}`",
- s.package_id(), features).into(),
- Some(p) => (p.package_id().clone(), ConflictReason::MissingFeatures(features)).into(),
+ None => format_err!(
+ "Package `{}` does not have these features: `{}`",
+ s.package_id(),
+ features
+ ).into(),
+ Some(p) => (
+ p.package_id().clone(),
+ ConflictReason::MissingFeatures(features),
+ ).into(),
});
}
if !reqs.used.is_empty() {
let pkgid = s.package_id();
- let set = self.resolve_features.entry(pkgid.clone())
- .or_insert_with(HashSet::new);
+ let set = self.resolve_features
+ .entry(pkgid.clone())
+ .or_insert_with(HashSet::new);
for feature in reqs.used {
set.insert(InternedString::new(feature));
}
}
}
-fn check_cycles(resolve: &Resolve, activations: &Activations)
- -> CargoResult<()> {
- let summaries: HashMap<&PackageId, &Summary> = activations.values()
+fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()> {
+ let summaries: HashMap<&PackageId, &Summary> = activations
+ .values()
.flat_map(|v| v.iter())
.map(|s| (s.package_id(), s))
.collect();
let mut checked = HashSet::new();
for pkg in all_packages {
if !checked.contains(pkg) {
- visit(resolve,
- pkg,
- &summaries,
- &mut HashSet::new(),
- &mut checked)?
+ visit(resolve, pkg, &summaries, &mut HashSet::new(), &mut checked)?
}
}
return Ok(());
- fn visit<'a>(resolve: &'a Resolve,
- id: &'a PackageId,
- summaries: &HashMap<&'a PackageId, &Summary>,
- visited: &mut HashSet<&'a PackageId>,
- checked: &mut HashSet<&'a PackageId>)
- -> CargoResult<()> {
+ fn visit<'a>(
+ resolve: &'a Resolve,
+ id: &'a PackageId,
+ summaries: &HashMap<&'a PackageId, &Summary>,
+ visited: &mut HashSet<&'a PackageId>,
+ checked: &mut HashSet<&'a PackageId>,
+ ) -> CargoResult<()> {
// See if we visited ourselves
if !visited.insert(id) {
- bail!("cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
- id, describe_path(&resolve.graph, id));
+ bail!(
+ "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
+ id,
+ describe_path(&resolve.graph, id)
+ );
}
// If we've already checked this node no need to recurse again as we'll
if checked.insert(id) {
let summary = summaries[id];
for dep in resolve.deps_not_replaced(id) {
- let is_transitive = summary.dependencies().iter().any(|d| {
- d.matches_id(dep) && d.is_transitive()
- });
+ let is_transitive = summary
+ .dependencies()
+ .iter()
+ .any(|d| d.matches_id(dep) && d.is_transitive());
let mut empty = HashSet::new();
- let visited = if is_transitive {&mut *visited} else {&mut empty};
+ let visited = if is_transitive {
+ &mut *visited
+ } else {
+ &mut empty
+ };
visit(resolve, dep, summaries, visited, checked)?;
if let Some(id) = resolve.replacement(dep) {
use std::io::prelude::*;
use atty;
-use termcolor::Color::{Green, Red, Yellow, Cyan};
-use termcolor::{self, StandardStream, Color, ColorSpec, WriteColor};
+use termcolor::Color::{Cyan, Green, Red, Yellow};
+use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
use util::errors::CargoResult;
pub enum Verbosity {
Verbose,
Normal,
- Quiet
+ Quiet,
}
/// An abstraction around a `Write`able object that remembers preferences for output verbosity and
impl fmt::Debug for Shell {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.err {
- ShellOut::Write(_) => {
- f.debug_struct("Shell")
- .field("verbosity", &self.verbosity)
- .finish()
- }
- ShellOut::Stream { color_choice, .. } => {
- f.debug_struct("Shell")
- .field("verbosity", &self.verbosity)
- .field("color_choice", &color_choice)
- .finish()
- }
+ ShellOut::Write(_) => f.debug_struct("Shell")
+ .field("verbosity", &self.verbosity)
+ .finish(),
+ ShellOut::Stream { color_choice, .. } => f.debug_struct("Shell")
+ .field("verbosity", &self.verbosity)
+ .field("color_choice", &color_choice)
+ .finish(),
}
}
}
/// Print a message, where the status will have `color` color, and can be justified. The
/// messages follows without color.
- fn print(&mut self,
- status: &fmt::Display,
- message: Option<&fmt::Display>,
- color: Color,
- justified: bool) -> CargoResult<()> {
+ fn print(
+ &mut self,
+ status: &fmt::Display,
+ message: Option<&fmt::Display>,
+ color: Color,
+ justified: bool,
+ ) -> CargoResult<()> {
match self.verbosity {
Verbosity::Quiet => Ok(()),
- _ => {
- self.err.print(status, message, color, justified)
- }
+ _ => self.err.print(status, message, color, justified),
}
}
/// Shortcut to right-align and color green a status message.
pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
- where T: fmt::Display, U: fmt::Display
+ where
+ T: fmt::Display,
+ U: fmt::Display,
{
self.print(&status, Some(&message), Green, true)
}
pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
- where T: fmt::Display,
+ where
+ T: fmt::Display,
{
self.print(&status, None, Cyan, true)
}
/// Shortcut to right-align a status message.
- pub fn status_with_color<T, U>(&mut self,
- status: T,
- message: U,
- color: Color) -> CargoResult<()>
- where T: fmt::Display, U: fmt::Display
+ pub fn status_with_color<T, U>(
+ &mut self,
+ status: T,
+ message: U,
+ color: Color,
+ ) -> CargoResult<()>
+ where
+ T: fmt::Display,
+ U: fmt::Display,
{
self.print(&status, Some(&message), color, true)
}
/// Run the callback only if we are in verbose mode
pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
- where F: FnMut(&mut Shell) -> CargoResult<()>
+ where
+ F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => callback(self),
- _ => Ok(())
+ _ => Ok(()),
}
}
/// Run the callback if we are not in verbose mode.
pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
- where F: FnMut(&mut Shell) -> CargoResult<()>
+ where
+ F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => Ok(()),
- _ => callback(self)
+ _ => callback(self),
}
}
/// Update the color choice (always, never, or auto) from a string.
pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
- if let ShellOut::Stream { ref mut stream, ref mut color_choice, .. } = self.err {
+ if let ShellOut::Stream {
+ ref mut stream,
+ ref mut color_choice,
+ ..
+ } = self.err
+ {
let cfg = match color {
Some("always") => ColorChoice::Always,
Some("never") => ColorChoice::Never,
- Some("auto") |
- None => ColorChoice::CargoAuto,
+ Some("auto") | None => ColorChoice::CargoAuto,
- Some(arg) => bail!("argument for --color must be auto, always, or \
- never, but found `{}`", arg),
+ Some(arg) => bail!(
+ "argument for --color must be auto, always, or \
+ never, but found `{}`",
+ arg
+ ),
};
*color_choice = cfg;
*stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
impl ShellOut {
/// Print out a message with a status. The status comes first and is bold + the given color.
/// The status can be justified, in which case the max width that will right align is 12 chars.
- fn print(&mut self,
- status: &fmt::Display,
- message: Option<&fmt::Display>,
- color: Color,
- justified: bool) -> CargoResult<()> {
+ fn print(
+ &mut self,
+ status: &fmt::Display,
+ message: Option<&fmt::Display>,
+ color: Color,
+ justified: bool,
+ ) -> CargoResult<()> {
match *self {
ShellOut::Stream { ref mut stream, .. } => {
stream.reset()?;
- stream.set_color(ColorSpec::new()
- .set_bold(true)
- .set_fg(Some(color)))?;
+ stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
if justified {
write!(stream, "{:>12}", status)?;
} else {
unsafe {
let mut winsize: libc::winsize = mem::zeroed();
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
- return None
+ return None;
}
if winsize.ws_col > 0 {
Some(winsize.ws_col as usize)
let stdout = GetStdHandle(STD_ERROR_HANDLE);
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
if GetConsoleScreenBufferInfo(stdout, &mut csbi) == 0 {
- return None
+ return None;
}
Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize)
}
-use std::collections::hash_map::{HashMap, Values, IterMut};
+use std::collections::hash_map::{HashMap, IterMut, Values};
use core::{Package, PackageId, Registry};
use util::CargoResult;
mod source_id;
-pub use self::source_id::{SourceId, GitReference};
+pub use self::source_id::{GitReference, SourceId};
/// A Source finds and downloads remote packages based on names and
/// versions.
impl<'src> SourceMap<'src> {
/// Create an empty map
pub fn new() -> SourceMap<'src> {
- SourceMap { map: HashMap::new() }
+ SourceMap {
+ map: HashMap::new(),
+ }
}
/// Like `HashMap::contains_key`
/// Like `HashMap::iter_mut`
pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> {
- SourcesMut { inner: self.map.iter_mut() }
+ SourcesMut {
+ inner: self.map.iter_mut(),
+ }
}
}
self.inner.next().map(|(a, b)| (a, &mut **b))
}
}
-
use ops;
use sources::git;
-use sources::{PathSource, GitSource, RegistrySource, CRATES_IO};
+use sources::{GitSource, PathSource, RegistrySource, CRATES_IO};
use sources::DirectorySource;
-use util::{Config, CargoResult, ToUrl};
+use util::{CargoResult, Config, ToUrl};
/// Unique identifier for a source of packages.
#[derive(Clone, Eq, Debug)]
pub fn from_url(string: &str) -> CargoResult<SourceId> {
let mut parts = string.splitn(2, '+');
let kind = parts.next().unwrap();
- let url = parts.next().ok_or_else(|| format_err!("invalid source `{}`", string))?;
+ let url = parts
+ .next()
+ .ok_or_else(|| format_err!("invalid source `{}`", string))?;
match kind {
"git" => {
for (k, v) in url.query_pairs() {
match &k[..] {
// map older 'ref' to branch
- "branch" |
- "ref" => reference = GitReference::Branch(v.into_owned()),
+ "branch" | "ref" => reference = GitReference::Branch(v.into_owned()),
"rev" => reference = GitReference::Rev(v.into_owned()),
"tag" => reference = GitReference::Tag(v.into_owned()),
url.set_fragment(None);
url.set_query(None);
Ok(SourceId::for_git(&url, reference)?.with_precise(precise))
- },
+ }
"registry" => {
let url = url.to_url()?;
- Ok(SourceId::new(Kind::Registry, url)?
- .with_precise(Some("locked".to_string())))
+ Ok(SourceId::new(Kind::Registry, url)?.with_precise(Some("locked".to_string())))
}
"path" => {
let url = url.to_url()?;
SourceId::new(Kind::Path, url)
}
- kind => Err(format_err!("unsupported source protocol: {}", kind))
+ kind => Err(format_err!("unsupported source protocol: {}", kind)),
}
}
/// A view of the `SourceId` that can be `Display`ed as a URL
pub fn to_url(&self) -> SourceIdToUrl {
- SourceIdToUrl { inner: &*self.inner }
+ SourceIdToUrl {
+ inner: &*self.inner,
+ }
}
/// Create a SourceId from a filesystem path.
let url = if let Some(ref index) = cfg.index {
static WARNED: AtomicBool = ATOMIC_BOOL_INIT;
if !WARNED.swap(true, SeqCst) {
- config.shell().warn("custom registry support via \
- the `registry.index` configuration is \
- being removed, this functionality \
- will not work in the future")?;
+ config.shell().warn(
+ "custom registry support via \
+ the `registry.index` configuration is \
+ being removed, this functionality \
+ will not work in the future",
+ )?;
}
&index[..]
} else {
pub fn is_registry(&self) -> bool {
match self.inner.kind {
Kind::Registry | Kind::LocalRegistry => true,
- _ => false,
+ _ => false,
}
}
inner: Arc::new(SourceIdInner {
precise: v,
..(*self.inner).clone()
- })
+ }),
}
}
/// same hash in different locations.
pub fn stable_hash<S: hash::Hasher>(&self, workspace: &Path, into: &mut S) {
if self.is_path() {
- if let Ok(p) = self.inner.url.to_file_path().unwrap().strip_prefix(workspace) {
+ if let Ok(p) = self.inner
+ .url
+ .to_file_path()
+ .unwrap()
+ .strip_prefix(workspace)
+ {
self.inner.kind.hash(into);
p.to_str().unwrap().hash(into);
- return
+ return;
}
}
self.hash(into)
impl ser::Serialize for SourceId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
if self.is_path() {
None::<String>.serialize(s)
impl<'de> de::Deserialize<'de> for SourceId {
fn deserialize<D>(d: D) -> Result<SourceId, D::Error>
- where D: de::Deserializer<'de>,
+ where
+ D: de::Deserializer<'de>,
{
let string = String::deserialize(d)?;
SourceId::from_url(&string).map_err(de::Error::custom)
impl fmt::Display for SourceId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self.inner {
- SourceIdInner { kind: Kind::Path, ref url, .. } => {
- fmt::Display::fmt(url, f)
- }
- SourceIdInner { kind: Kind::Git(ref reference), ref url,
- ref precise, .. } => {
+ SourceIdInner {
+ kind: Kind::Path,
+ ref url,
+ ..
+ } => fmt::Display::fmt(url, f),
+ SourceIdInner {
+ kind: Kind::Git(ref reference),
+ ref url,
+ ref precise,
+ ..
+ } => {
write!(f, "{}", url)?;
if let Some(pretty) = reference.pretty_ref() {
write!(f, "?{}", pretty)?;
}
Ok(())
}
- SourceIdInner { kind: Kind::Registry, ref url, .. } |
- SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => {
- write!(f, "registry `{}`", url)
- }
- SourceIdInner { kind: Kind::Directory, ref url, .. } => {
- write!(f, "dir {}", url)
+ SourceIdInner {
+ kind: Kind::Registry,
+ ref url,
+ ..
}
+ | SourceIdInner {
+ kind: Kind::LocalRegistry,
+ ref url,
+ ..
+ } => write!(f, "registry `{}`", url),
+ SourceIdInner {
+ kind: Kind::Directory,
+ ref url,
+ ..
+ } => write!(f, "dir {}", url),
}
}
}
fn hash<S: hash::Hasher>(&self, into: &mut S) {
self.inner.kind.hash(into);
match *self.inner {
- SourceIdInner { kind: Kind::Git(..), ref canonical_url, .. } => {
- canonical_url.as_str().hash(into)
- }
+ SourceIdInner {
+ kind: Kind::Git(..),
+ ref canonical_url,
+ ..
+ } => canonical_url.as_str().hash(into),
_ => self.inner.url.as_str().hash(into),
}
}
impl<'a> fmt::Display for SourceIdToUrl<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.inner {
- SourceIdInner { kind: Kind::Path, ref url, .. } => {
- write!(f, "path+{}", url)
- }
SourceIdInner {
- kind: Kind::Git(ref reference), ref url, ref precise, ..
+ kind: Kind::Path,
+ ref url,
+ ..
+ } => write!(f, "path+{}", url),
+ SourceIdInner {
+ kind: Kind::Git(ref reference),
+ ref url,
+ ref precise,
+ ..
} => {
write!(f, "git+{}", url)?;
if let Some(pretty) = reference.pretty_ref() {
}
Ok(())
}
- SourceIdInner { kind: Kind::Registry, ref url, .. } => {
- write!(f, "registry+{}", url)
- }
- SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => {
- write!(f, "local-registry+{}", url)
- }
- SourceIdInner { kind: Kind::Directory, ref url, .. } => {
- write!(f, "directory+{}", url)
- }
+ SourceIdInner {
+ kind: Kind::Registry,
+ ref url,
+ ..
+ } => write!(f, "registry+{}", url),
+ SourceIdInner {
+ kind: Kind::LocalRegistry,
+ ref url,
+ ..
+ } => write!(f, "local-registry+{}", url),
+ SourceIdInner {
+ kind: Kind::Directory,
+ ref url,
+ ..
+ } => write!(f, "directory+{}", url),
}
}
}
#[cfg(test)]
mod tests {
- use super::{SourceId, Kind, GitReference};
+ use super::{GitReference, Kind, SourceId};
use util::ToUrl;
#[test]
}
impl Summary {
- pub fn new(pkg_id: PackageId,
- dependencies: Vec<Dependency>,
- features: BTreeMap<String, Vec<String>>,
- links: Option<String>) -> CargoResult<Summary> {
+ pub fn new(
+ pkg_id: PackageId,
+ dependencies: Vec<Dependency>,
+ features: BTreeMap<String, Vec<String>>,
+ links: Option<String>,
+ ) -> CargoResult<Summary> {
for dep in dependencies.iter() {
if features.get(&*dep.name()).is_some() {
- bail!("Features and dependencies cannot have the \
- same name: `{}`", dep.name())
+ bail!(
+ "Features and dependencies cannot have the \
+ same name: `{}`",
+ dep.name()
+ )
}
if dep.is_optional() && !dep.is_transitive() {
- bail!("Dev-dependencies are not allowed to be optional: `{}`",
- dep.name())
+ bail!(
+ "Dev-dependencies are not allowed to be optional: `{}`",
+ dep.name()
+ )
}
}
for (feature, list) in features.iter() {
let mut parts = dep.splitn(2, '/');
let dep = parts.next().unwrap();
let is_reexport = parts.next().is_some();
- if !is_reexport && features.get(dep).is_some() { continue }
+ if !is_reexport && features.get(dep).is_some() {
+ continue;
+ }
match dependencies.iter().find(|d| &*d.name() == dep) {
Some(d) => {
- if d.is_optional() || is_reexport { continue }
- bail!("Feature `{}` depends on `{}` which is not an \
- optional dependency.\nConsider adding \
- `optional = true` to the dependency",
- feature, dep)
- }
- None if is_reexport => {
- bail!("Feature `{}` requires a feature of `{}` which is not a \
- dependency", feature, dep)
- }
- None => {
- bail!("Feature `{}` includes `{}` which is neither \
- a dependency nor another feature", feature, dep)
+ if d.is_optional() || is_reexport {
+ continue;
+ }
+ bail!(
+ "Feature `{}` depends on `{}` which is not an \
+ optional dependency.\nConsider adding \
+ `optional = true` to the dependency",
+ feature,
+ dep
+ )
}
+ None if is_reexport => bail!(
+ "Feature `{}` requires a feature of `{}` which is not a \
+ dependency",
+ feature,
+ dep
+ ),
+ None => bail!(
+ "Feature `{}` includes `{}` which is neither \
+ a dependency nor another feature",
+ feature,
+ dep
+ ),
}
}
}
})
}
- pub fn package_id(&self) -> &PackageId { &self.inner.package_id }
- pub fn name(&self) -> InternedString { self.package_id().name() }
- pub fn version(&self) -> &Version { self.package_id().version() }
- pub fn source_id(&self) -> &SourceId { self.package_id().source_id() }
- pub fn dependencies(&self) -> &[Dependency] { &self.inner.dependencies }
- pub fn features(&self) -> &BTreeMap<String, Vec<String>> { &self.inner.features }
+ pub fn package_id(&self) -> &PackageId {
+ &self.inner.package_id
+ }
+ pub fn name(&self) -> InternedString {
+ self.package_id().name()
+ }
+ pub fn version(&self) -> &Version {
+ self.package_id().version()
+ }
+ pub fn source_id(&self) -> &SourceId {
+ self.package_id().source_id()
+ }
+ pub fn dependencies(&self) -> &[Dependency] {
+ &self.inner.dependencies
+ }
+ pub fn features(&self) -> &BTreeMap<String, Vec<String>> {
+ &self.inner.features
+ }
pub fn checksum(&self) -> Option<&str> {
self.inner.checksum.as_ref().map(|s| &s[..])
}
}
pub fn map_dependencies<F>(mut self, f: F) -> Summary
- where F: FnMut(Dependency) -> Dependency {
+ where
+ F: FnMut(Dependency) -> Dependency,
+ {
{
let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
let deps = mem::replace(slot, Vec::new());
self
}
- pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
- -> Summary {
+ pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary {
let me = if self.package_id().source_id() == to_replace {
let new_id = self.package_id().with_source_id(replace_with);
self.override_id(new_id)
} else {
self
};
- me.map_dependencies(|dep| {
- dep.map_source(to_replace, replace_with)
- })
+ me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
}
}
-use std::collections::hash_map::{HashMap, Entry};
+use std::collections::hash_map::{Entry, HashMap};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::slice;
use glob::glob;
use url::Url;
-use core::{Package, VirtualManifest, EitherManifest, SourceId};
-use core::{PackageIdSpec, Dependency, Profile, Profiles};
+use core::{EitherManifest, Package, SourceId, VirtualManifest};
+use core::{Dependency, PackageIdSpec, Profile, Profiles};
use util::{Config, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
/// This function will construct the entire workspace by determining the
/// root and all member packages. It will then validate the workspace
/// before returning it, so `Ok` is only returned for valid workspaces.
- pub fn new(manifest_path: &Path, config: &'cfg Config)
- -> CargoResult<Workspace<'cfg>> {
+ pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
let target_dir = config.target_dir()?;
let mut ws = Workspace {
///
/// This is currently only used in niche situations like `cargo install` or
/// `cargo package`.
- pub fn ephemeral(package: Package,
- config: &'cfg Config,
- target_dir: Option<Filesystem>,
- require_optional_deps: bool) -> CargoResult<Workspace<'cfg>> {
+ pub fn ephemeral(
+ package: Package,
+ config: &'cfg Config,
+ target_dir: Option<Filesystem>,
+ require_optional_deps: bool,
+ ) -> CargoResult<Workspace<'cfg>> {
let mut ws = Workspace {
config,
current_manifest: package.manifest_path().to_path_buf(),
/// indicating that something else should be passed.
pub fn current(&self) -> CargoResult<&Package> {
let pkg = self.current_opt().ok_or_else(|| {
- format_err!("manifest path `{}` is a virtual manifest, but this \
- command requires running against an actual package in \
- this workspace", self.current_manifest.display())
+ format_err!(
+ "manifest path `{}` is a virtual manifest, but this \
+ command requires running against an actual package in \
+ this workspace",
+ self.current_manifest.display()
+ )
})?;
Ok(pkg)
}
pub fn current_opt(&self) -> Option<&Package> {
match *self.packages.get(&self.current_manifest) {
MaybePackage::Package(ref p) => Some(p),
- MaybePackage::Virtual(..) => None
+ MaybePackage::Virtual(..) => None,
}
}
pub fn is_virtual(&self) -> bool {
match *self.packages.get(&self.current_manifest) {
MaybePackage::Package(..) => false,
- MaybePackage::Virtual(..) => true
+ MaybePackage::Virtual(..) => true,
}
}
}
pub fn profiles(&self) -> &Profiles {
- let root = self.root_manifest.as_ref().unwrap_or(&self.current_manifest);
+ let root = self.root_manifest
+ .as_ref()
+ .unwrap_or(&self.current_manifest);
match *self.packages.get(root) {
MaybePackage::Package(ref p) => p.manifest().profiles(),
MaybePackage::Virtual(ref vm) => vm.profiles(),
pub fn root(&self) -> &Path {
match self.root_manifest {
Some(ref p) => p,
- None => &self.current_manifest
- }.parent().unwrap()
+ None => &self.current_manifest,
+ }.parent()
+ .unwrap()
}
pub fn target_dir(&self) -> Filesystem {
- self.target_dir.clone().unwrap_or_else(|| {
- Filesystem::new(self.root().join("target"))
- })
+ self.target_dir
+ .clone()
+ .unwrap_or_else(|| Filesystem::new(self.root().join("target")))
}
/// Returns the root [replace] section of this workspace.
self.require_optional_deps
}
- pub fn set_require_optional_deps<'a>(&'a mut self, require_optional_deps: bool) -> &mut Workspace<'cfg> {
+ pub fn set_require_optional_deps<'a>(
+ &'a mut self,
+ require_optional_deps: bool,
+ ) -> &mut Workspace<'cfg> {
self.require_optional_deps = require_optional_deps;
self
}
///
/// Returns an error if `manifest_path` isn't actually a valid manifest or
/// if some other transient error happens.
- fn find_root(&mut self, manifest_path: &Path)
- -> CargoResult<Option<PathBuf>> {
+ fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> {
fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> {
- let path = member_manifest.parent().unwrap()
+ let path = member_manifest
+ .parent()
+ .unwrap()
.join(root_link)
.join("Cargo.toml");
debug!("find_root - pointer {}", path.display());
match *current.workspace_config() {
WorkspaceConfig::Root(_) => {
debug!("find_root - is root {}", manifest_path.display());
- return Ok(Some(manifest_path.to_path_buf()))
- }
- WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
- return Ok(Some(read_root_pointer(manifest_path, path_to_root)?))
+ return Ok(Some(manifest_path.to_path_buf()));
}
+ WorkspaceConfig::Member {
+ root: Some(ref path_to_root),
+ } => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)),
WorkspaceConfig::Member { root: None } => {}
}
}
debug!("find_root - found a root checking exclusion");
if !ances_root_config.is_excluded(manifest_path) {
debug!("find_root - found!");
- return Ok(Some(ances_manifest_path))
+ return Ok(Some(ances_manifest_path));
}
}
- WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
+ WorkspaceConfig::Member {
+ root: Some(ref path_to_root),
+ } => {
debug!("find_root - found pointer");
- return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?))
+ return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?));
}
WorkspaceConfig::Member { .. } => {}
}
// current project, but we don't want to mistakenly try to put
// crates.io crates into the workspace by accident.
if self.config.home() == path {
- break
+ break;
}
}
debug!("find_members - only me as a member");
self.members.push(self.current_manifest.clone());
self.default_members.push(self.current_manifest.clone());
- return Ok(())
+ return Ok(());
}
};
let root_package = self.packages.load(&root_manifest_path)?;
match *root_package.workspace_config() {
WorkspaceConfig::Root(ref root_config) => {
- members_paths = root_config.members_paths(
- root_config.members.as_ref().unwrap_or(&vec![])
- )?;
+ members_paths =
+ root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?;
default_members_paths = if let Some(ref default) = root_config.default_members {
Some(root_config.members_paths(default)?)
} else {
None
}
}
- _ => bail!("root of a workspace inferred but wasn't a root: {}",
- root_manifest_path.display()),
+ _ => bail!(
+ "root of a workspace inferred but wasn't a root: {}",
+ root_manifest_path.display()
+ ),
}
}
for path in default {
let manifest_path = paths::normalize_path(&path.join("Cargo.toml"));
if !self.members.contains(&manifest_path) {
- bail!("package `{}` is listed in workspace’s default-members \
- but is not a member.",
- path.display())
+ bail!(
+ "package `{}` is listed in workspace’s default-members \
+ but is not a member.",
+ path.display()
+ )
}
self.default_members.push(manifest_path)
}
self.find_path_deps(&root_manifest_path, &root_manifest_path, false)
}
- fn find_path_deps(&mut self,
- manifest_path: &Path,
- root_manifest: &Path,
- is_path_dep: bool) -> CargoResult<()> {
+ fn find_path_deps(
+ &mut self,
+ manifest_path: &Path,
+ root_manifest: &Path,
+ is_path_dep: bool,
+ ) -> CargoResult<()> {
let manifest_path = paths::normalize_path(manifest_path);
if self.members.contains(&manifest_path) {
- return Ok(())
+ return Ok(());
}
- if is_path_dep
- && !manifest_path.parent().unwrap().starts_with(self.root())
- && self.find_root(&manifest_path)? != self.root_manifest {
+ if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root())
+ && self.find_root(&manifest_path)? != self.root_manifest
+ {
// If `manifest_path` is a path dependency outside of the workspace,
// don't add it, or any of its dependencies, as a members.
- return Ok(())
+ return Ok(());
}
- if let WorkspaceConfig::Root(ref root_config) = *self.packages.load(root_manifest)?.workspace_config() {
+ if let WorkspaceConfig::Root(ref root_config) =
+ *self.packages.load(root_manifest)?.workspace_config()
+ {
if root_config.is_excluded(&manifest_path) {
- return Ok(())
+ return Ok(());
}
}
MaybePackage::Virtual(_) => return Ok(()),
};
pkg.dependencies()
- .iter()
- .map(|d| d.source_id())
- .filter(|d| d.is_path())
- .filter_map(|d| d.url().to_file_path().ok())
- .map(|p| p.join("Cargo.toml"))
- .collect::<Vec<_>>()
+ .iter()
+ .map(|d| d.source_id())
+ .filter(|d| d.is_path())
+ .filter_map(|d| d.url().to_file_path().ok())
+ .map(|p| p.join("Cargo.toml"))
+ .collect::<Vec<_>>()
};
for candidate in candidates {
self.find_path_deps(&candidate, root_manifest, true)?;
/// 3. The current crate is a member of this workspace.
fn validate(&mut self) -> CargoResult<()> {
if self.root_manifest.is_none() {
- return Ok(())
+ return Ok(());
}
let mut roots = Vec::new();
MaybePackage::Virtual(_) => continue,
};
if let Some(prev) = names.insert(name, member) {
- bail!("two packages named `{}` in this workspace:\n\
- - {}\n\
- - {}", name, prev.display(), member.display());
+ bail!(
+ "two packages named `{}` in this workspace:\n\
+ - {}\n\
+ - {}",
+ name,
+ prev.display(),
+ member.display()
+ );
}
}
}
match roots.len() {
- 0 => {
- bail!("`package.workspace` configuration points to a crate \
- which is not configured with [workspace]: \n\
- configuration at: {}\n\
- points to: {}",
- self.current_manifest.display(),
- self.root_manifest.as_ref().unwrap().display())
- }
+ 0 => bail!(
+ "`package.workspace` configuration points to a crate \
+ which is not configured with [workspace]: \n\
+ configuration at: {}\n\
+ points to: {}",
+ self.current_manifest.display(),
+ self.root_manifest.as_ref().unwrap().display()
+ ),
1 => {}
_ => {
- bail!("multiple workspace roots found in the same workspace:\n{}",
- roots.iter()
- .map(|r| format!(" {}", r.display()))
- .collect::<Vec<_>>()
- .join("\n"));
+ bail!(
+ "multiple workspace roots found in the same workspace:\n{}",
+ roots
+ .iter()
+ .map(|r| format!(" {}", r.display()))
+ .collect::<Vec<_>>()
+ .join("\n")
+ );
}
}
for member in self.members.clone() {
let root = self.find_root(&member)?;
if root == self.root_manifest {
- continue
+ continue;
}
match root {
Some(root) => {
- bail!("package `{}` is a member of the wrong workspace\n\
- expected: {}\n\
- actual: {}",
- member.display(),
- self.root_manifest.as_ref().unwrap().display(),
- root.display());
+ bail!(
+ "package `{}` is a member of the wrong workspace\n\
+ expected: {}\n\
+ actual: {}",
+ member.display(),
+ self.root_manifest.as_ref().unwrap().display(),
+ root.display()
+ );
}
None => {
- bail!("workspace member `{}` is not hierarchically below \
- the workspace root `{}`",
- member.display(),
- self.root_manifest.as_ref().unwrap().display());
+ bail!(
+ "workspace member `{}` is not hierarchically below \
+ the workspace root `{}`",
+ member.display(),
+ self.root_manifest.as_ref().unwrap().display()
+ );
}
}
}
// FIXME: Make this more generic by using a relative path resolver between member and
// root.
let members_msg = match current_dir.strip_prefix(root_dir) {
- Ok(rel) => {
- format!("this may be fixable by adding `{}` to the \
- `workspace.members` array of the manifest \
- located at: {}",
- rel.display(),
- root.display())
- }
- Err(_) => {
- format!("this may be fixable by adding a member to \
- the `workspace.members` array of the \
- manifest located at: {}", root.display())
- }
+ Ok(rel) => format!(
+ "this may be fixable by adding `{}` to the \
+ `workspace.members` array of the manifest \
+ located at: {}",
+ rel.display(),
+ root.display()
+ ),
+ Err(_) => format!(
+ "this may be fixable by adding a member to \
+ the `workspace.members` array of the \
+ manifest located at: {}",
+ root.display()
+ ),
};
let extra = match *root_pkg {
MaybePackage::Virtual(_) => members_msg,
WorkspaceConfig::Member { .. } => unreachable!(),
};
if !has_members_list {
- format!("this may be fixable by ensuring that this \
- crate is depended on by the workspace \
- root: {}", root.display())
+ format!(
+ "this may be fixable by ensuring that this \
+ crate is depended on by the workspace \
+ root: {}",
+ root.display()
+ )
} else {
members_msg
}
}
};
- bail!("current package believes it's in a workspace when it's not:\n\
- current: {}\n\
- workspace: {}\n\n{}",
- self.current_manifest.display(),
- root.display(),
- extra);
+ bail!(
+ "current package believes it's in a workspace when it's not:\n\
+ current: {}\n\
+ workspace: {}\n\n{}",
+ self.current_manifest.display(),
+ root.display(),
+ extra
+ );
}
if let Some(ref root_manifest) = self.root_manifest {
doctest: Profile::default_doctest(),
};
- for pkg in self.members().filter(|p| p.manifest_path() != root_manifest) {
+ for pkg in self.members()
+ .filter(|p| p.manifest_path() != root_manifest)
+ {
if pkg.manifest().profiles() != &default_profiles {
- let message = &format!("profiles for the non root package will be ignored, \
- specify profiles at the workspace root:\n\
- package: {}\n\
- workspace: {}",
- pkg.manifest_path().display(),
- root_manifest.display());
+ let message = &format!(
+ "profiles for the non root package will be ignored, \
+ specify profiles at the workspace root:\n\
+ package: {}\n\
+ workspace: {}",
+ pkg.manifest_path().display(),
+ root_manifest.display()
+ );
//TODO: remove `Eq` bound from `Profiles` when the warning is removed.
self.config.shell().warn(&message)?;
}
}
-
impl<'cfg> Packages<'cfg> {
fn get(&self, manifest_path: &Path) -> &MaybePackage {
&self.packages[manifest_path.parent().unwrap()]
EitherManifest::Real(manifest) => {
MaybePackage::Package(Package::new(manifest, manifest_path))
}
- EitherManifest::Virtual(vm) => {
- MaybePackage::Virtual(vm)
- }
+ EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm),
}))
}
}
fn next(&mut self) -> Option<&'a Package> {
loop {
- let next = self.iter.next().map(|path| {
- self.ws.packages.get(path)
- });
+ let next = self.iter.next().map(|path| self.ws.packages.get(path));
match next {
Some(&MaybePackage::Package(ref p)) => return Some(p),
Some(&MaybePackage::Virtual(_)) => {}
///
/// This method does NOT consider the `members` list.
fn is_excluded(&self, manifest_path: &Path) -> bool {
- let excluded = self.exclude.iter().any(|ex| {
- manifest_path.starts_with(self.root_dir.join(ex))
- });
+ let excluded = self.exclude
+ .iter()
+ .any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
let explicit_member = match self.members {
- Some(ref members) => {
- members.iter().any(|mem| {
- manifest_path.starts_with(self.root_dir.join(mem))
- })
- }
+ Some(ref members) => members
+ .iter()
+ .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))),
None => false,
};
Some(p) => p,
None => return Ok(Vec::new()),
};
- let res = glob(path).chain_err(|| {
- format_err!("could not parse pattern `{}`", &path)
- })?;
+ let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?;
let res = res.map(|p| {
- p.chain_err(|| {
- format_err!("unable to match path to pattern `{}`", &path)
- })
+ p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path))
}).collect::<Result<Vec<_>, _>>()?;
Ok(res)
}
#![cfg_attr(test, deny(warnings))]
-
// Currently, Cargo does not use clippy for its source code.
// But if someone runs it they should know that
// @alexcrichton disagree with clippy on some style things
#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
-#[macro_use] extern crate failure;
-#[macro_use] extern crate log;
-#[macro_use] extern crate scoped_tls;
-#[macro_use] extern crate serde_derive;
-#[macro_use] extern crate serde_json;
extern crate atty;
extern crate clap;
+#[cfg(target_os = "macos")]
+extern crate core_foundation;
extern crate crates_io as registry;
extern crate crossbeam;
extern crate curl;
+#[macro_use]
+extern crate failure;
extern crate filetime;
extern crate flate2;
extern crate fs2;
extern crate home;
extern crate ignore;
extern crate jobserver;
+#[macro_use]
+extern crate lazy_static;
extern crate lazycell;
-#[macro_use] extern crate lazy_static;
extern crate libc;
extern crate libgit2_sys;
+#[macro_use]
+extern crate log;
extern crate num_cpus;
extern crate same_file;
+#[macro_use]
+extern crate scoped_tls;
extern crate semver;
extern crate serde;
+#[macro_use]
+extern crate serde_derive;
extern crate serde_ignored;
+#[macro_use]
+extern crate serde_json;
extern crate shell_escape;
extern crate tar;
extern crate tempdir;
extern crate termcolor;
extern crate toml;
extern crate url;
-#[cfg(target_os = "macos")]
-extern crate core_foundation;
use std::fmt;
impl fmt::Display for VersionInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "cargo {}.{}.{}",
- self.major, self.minor, self.patch)?;
+ write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?;
if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
if channel != "stable" {
write!(f, "-{}", channel)?;
if let Some(ref cfg) = self.cfg_info {
if let Some(ref ci) = cfg.commit_info {
- write!(f, " ({} {})",
- ci.short_commit_hash, ci.commit_date)?;
+ write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}
};
Ok(())
}
}
- let CliError { error, exit_code, unknown } = err;
+ let CliError {
+ error,
+ exit_code,
+ unknown,
+ } = err;
// exit_code == 0 is non-fatal error, e.g. docopt version info
let fatal = exit_code != 0;
}
if !handle_cause(&error, shell) || hide {
- drop(writeln!(shell.err(), "\nTo learn more, run the command again \
- with --verbose."));
+ drop(writeln!(
+ shell.err(),
+ "\nTo learn more, run the command again \
+ with --verbose."
+ ));
}
}
match option_env!("CFG_RELEASE_CHANNEL") {
// We have environment variables set up from configure/make.
Some(_) => {
- let commit_info =
- option_env!("CFG_COMMIT_HASH").map(|s| {
- CommitInfo {
- commit_hash: s.to_string(),
- short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
- commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
- }
- });
+ let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo {
+ commit_hash: s.to_string(),
+ short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
+ commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
+ });
VersionInfo {
major,
minor,
commit_info,
}),
}
- },
- // We are being compiled by Cargo itself.
- None => {
- VersionInfo {
- major,
- minor,
- patch,
- pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
- cfg_info: None,
- }
}
+ // We are being compiled by Cargo itself.
+ None => VersionInfo {
+ major,
+ minor,
+ patch,
+ pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
+ cfg_info: None,
+ },
}
}
use util::Config;
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
-use ops::{self, Context, BuildConfig, Kind, Unit};
+use ops::{self, BuildConfig, Context, Kind, Unit};
pub struct CleanOptions<'a> {
pub config: &'a Config,
let profiles = ws.profiles();
let host_triple = opts.config.rustc()?.host.clone();
- let mut cx = Context::new(ws, &resolve, &packages, opts.config,
- BuildConfig {
- host_triple,
- requested_target: opts.target.clone(),
- release: opts.release,
- jobs: 1,
- ..BuildConfig::default()
- },
- profiles)?;
+ let mut cx = Context::new(
+ ws,
+ &resolve,
+ &packages,
+ opts.config,
+ BuildConfig {
+ host_triple,
+ requested_target: opts.target.clone(),
+ release: opts.release,
+ jobs: 1,
+ ..BuildConfig::default()
+ },
+ profiles,
+ )?;
let mut units = Vec::new();
for spec in opts.spec.iter() {
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
let Profiles {
- ref release, ref dev, ref test, ref bench, ref doc,
- ref custom_build, ref test_deps, ref bench_deps, ref check,
- ref check_test, ref doctest,
+ ref release,
+ ref dev,
+ ref test,
+ ref bench,
+ ref doc,
+ ref custom_build,
+ ref test_deps,
+ ref bench_deps,
+ ref check,
+ ref check_test,
+ ref doctest,
} = *profiles;
- let profiles = [release, dev, test, bench, doc, custom_build,
- test_deps, bench_deps, check, check_test, doctest];
+ let profiles = [
+ release,
+ dev,
+ test,
+ bench,
+ doc,
+ custom_build,
+ test_deps,
+ bench_deps,
+ check,
+ check_test,
+ doctest,
+ ];
for profile in profiles.iter() {
units.push(Unit {
pkg,
} else {
rm_rf(&cx.build_script_dir(unit), config)?;
}
- continue
+ continue;
}
for &(ref src, ref link_dst, _) in cx.target_filenames(unit)?.iter() {
fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
- config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
- paths::remove_dir_all(path).chain_err(|| {
- format_err!("could not remove build directory")
- })?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Removing", path.display()))?;
+ paths::remove_dir_all(path).chain_err(|| format_err!("could not remove build directory"))?;
} else if m.is_ok() {
- config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
- paths::remove_file(path).chain_err(|| {
- format_err!("failed to remove build artifact")
- })?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Removing", path.display()))?;
+ paths::remove_file(path).chain_err(|| format_err!("failed to remove build artifact"))?;
}
Ok(())
}
use std::path::PathBuf;
use std::sync::Arc;
-use core::{Source, Package, Target};
-use core::{Profile, TargetKind, Profiles, Workspace, PackageId, PackageIdSpec};
-use core::resolver::{Resolve, Method};
-use ops::{self, BuildOutput, Executor, DefaultExecutor};
+use core::{Package, Source, Target};
+use core::{PackageId, PackageIdSpec, Profile, Profiles, TargetKind, Workspace};
+use core::resolver::{Method, Resolve};
+use ops::{self, BuildOutput, DefaultExecutor, Executor};
use util::config::Config;
-use util::{CargoResult, profile};
+use util::{profile, CargoResult};
/// Contains information about how a package should be compiled.
#[derive(Debug)]
}
impl<'a> CompileOptions<'a> {
- pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a>
- {
+ pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a> {
CompileOptions {
config,
jobs: None,
spec: ops::Packages::Packages(Vec::new()),
mode,
release: false,
- filter: CompileFilter::Default { required_features_filterable: false },
+ filter: CompileFilter::Default {
+ required_features_filterable: false,
+ },
message_format: MessageFormat::Human,
target_rustdoc_args: None,
target_rustc_args: None,
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MessageFormat {
Human,
- Json
+ Json,
}
#[derive(Clone, PartialEq, Eq, Debug)]
}
impl Packages {
- pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>)
- -> CargoResult<Self>
- {
+ pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
Ok(match (all, exclude.len(), package.len()) {
(false, 0, 0) => Packages::Default,
(false, 0, _) => Packages::Packages(package),
pub fn into_package_id_specs(&self, ws: &Workspace) -> CargoResult<Vec<PackageIdSpec>> {
let specs = match *self {
- Packages::All => {
- ws.members()
- .map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
- .collect()
- }
- Packages::OptOut(ref opt_out) => {
- ws.members()
- .map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
- .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
- .collect()
- }
- Packages::Packages(ref packages) if packages.is_empty() => {
- ws.current_opt()
- .map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
- .into_iter().collect()
- }
- Packages::Packages(ref packages) => {
- packages.iter().map(|p| PackageIdSpec::parse(p)).collect::<CargoResult<Vec<_>>>()?
- }
- Packages::Default => {
- ws.default_members()
- .map(Package::package_id)
- .map(PackageIdSpec::from_package_id)
- .collect()
- }
+ Packages::All => ws.members()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect(),
+ Packages::OptOut(ref opt_out) => ws.members()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
+ .collect(),
+ Packages::Packages(ref packages) if packages.is_empty() => ws.current_opt()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .into_iter()
+ .collect(),
+ Packages::Packages(ref packages) => packages
+ .iter()
+ .map(|p| PackageIdSpec::parse(p))
+ .collect::<CargoResult<Vec<_>>>()?,
+ Packages::Default => ws.default_members()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect(),
};
if specs.is_empty() {
if ws.is_virtual() {
- bail!("manifest path `{}` contains no package: The manifest is virtual, \
- and the workspace has no members.", ws.root().display())
+ bail!(
+ "manifest path `{}` contains no package: The manifest is virtual, \
+ and the workspace has no members.",
+ ws.root().display()
+ )
}
bail!("no packages to compile")
}
examples: FilterRule,
tests: FilterRule,
benches: FilterRule,
- }
+ },
}
-pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>)
- -> CargoResult<ops::Compilation<'a>> {
+pub fn compile<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions<'a>,
+) -> CargoResult<ops::Compilation<'a>> {
compile_with_exec(ws, options, Arc::new(DefaultExecutor))
}
-pub fn compile_with_exec<'a>(ws: &Workspace<'a>,
- options: &CompileOptions<'a>,
- exec: Arc<Executor>)
- -> CargoResult<ops::Compilation<'a>> {
+pub fn compile_with_exec<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions<'a>,
+ exec: Arc<Executor>,
+) -> CargoResult<ops::Compilation<'a>> {
for member in ws.members() {
for warning in member.manifest().warnings().iter() {
if warning.is_critical {
let err = format_err!("{}", warning.message);
- let cx = format_err!("failed to parse manifest at `{}`",
- member.manifest_path().display());
- return Err(err.context(cx).into())
+ let cx = format_err!(
+ "failed to parse manifest at `{}`",
+ member.manifest_path().display()
+ );
+ return Err(err.context(cx).into());
} else {
options.config.shell().warn(&warning.message)?
}
compile_ws(ws, None, options, exec)
}
-pub fn compile_ws<'a>(ws: &Workspace<'a>,
- source: Option<Box<Source + 'a>>,
- options: &CompileOptions<'a>,
- exec: Arc<Executor>)
- -> CargoResult<ops::Compilation<'a>> {
- let CompileOptions { config, jobs, ref target, ref spec, ref features,
- all_features, no_default_features,
- release, mode, message_format,
- ref filter,
- ref target_rustdoc_args,
- ref target_rustc_args } = *options;
+pub fn compile_ws<'a>(
+ ws: &Workspace<'a>,
+ source: Option<Box<Source + 'a>>,
+ options: &CompileOptions<'a>,
+ exec: Arc<Executor>,
+) -> CargoResult<ops::Compilation<'a>> {
+ let CompileOptions {
+ config,
+ jobs,
+ ref target,
+ ref spec,
+ ref features,
+ all_features,
+ no_default_features,
+ release,
+ mode,
+ message_format,
+ ref filter,
+ ref target_rustdoc_args,
+ ref target_rustc_args,
+ } = *options;
let target = target.clone();
all_features,
uses_default_features: !no_default_features,
};
- let resolve = ops::resolve_ws_with_method(ws,
- source,
- method,
- &specs,
- )?;
+ let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
let (packages, resolve_with_overrides) = resolve;
- let to_builds = specs.iter().map(|p| {
- let pkgid = p.query(resolve_with_overrides.iter())?;
- let p = packages.get(pkgid)?;
- p.manifest().print_teapot(ws.config());
- Ok(p)
- }).collect::<CargoResult<Vec<_>>>()?;
+ let to_builds = specs
+ .iter()
+ .map(|p| {
+ let pkgid = p.query(resolve_with_overrides.iter())?;
+ let p = packages.get(pkgid)?;
+ p.manifest().print_teapot(ws.config());
+ Ok(p)
+ })
+ .collect::<CargoResult<Vec<_>>>()?;
let mut general_targets = Vec::new();
let mut package_targets = Vec::new();
match (target_rustc_args, target_rustdoc_args) {
- (&Some(..), _) |
- (_, &Some(..)) if to_builds.len() != 1 => {
+ (&Some(..), _) | (_, &Some(..)) if to_builds.len() != 1 => {
panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags")
}
(&Some(ref args), _) => {
- let all_features = resolve_all_features(&resolve_with_overrides,
- to_builds[0].package_id());
- let targets = generate_targets(to_builds[0], profiles,
- mode, filter, &all_features, release)?;
+ let all_features =
+ resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
+ let targets =
+ generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
- bail!("extra arguments to `rustc` can only be passed to one \
- target, consider filtering\nthe package by passing \
- e.g. `--lib` or `--bin NAME` to specify a single target")
+ bail!(
+ "extra arguments to `rustc` can only be passed to one \
+ target, consider filtering\nthe package by passing \
+ e.g. `--lib` or `--bin NAME` to specify a single target"
+ )
}
}
(&None, &Some(ref args)) => {
- let all_features = resolve_all_features(&resolve_with_overrides,
- to_builds[0].package_id());
- let targets = generate_targets(to_builds[0], profiles,
- mode, filter, &all_features, release)?;
+ let all_features =
+ resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
+ let targets =
+ generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustdoc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
- bail!("extra arguments to `rustdoc` can only be passed to one \
- target, consider filtering\nthe package by passing e.g. \
- `--lib` or `--bin NAME` to specify a single target")
- }
- }
- (&None, &None) => {
- for &to_build in to_builds.iter() {
- let all_features = resolve_all_features(&resolve_with_overrides,
- to_build.package_id());
- let targets = generate_targets(to_build, profiles, mode,
- filter, &all_features, release)?;
- package_targets.push((to_build, targets));
+ bail!(
+ "extra arguments to `rustdoc` can only be passed to one \
+ target, consider filtering\nthe package by passing e.g. \
+ `--lib` or `--bin NAME` to specify a single target"
+ )
}
}
+ (&None, &None) => for &to_build in to_builds.iter() {
+ let all_features = resolve_all_features(&resolve_with_overrides, to_build.package_id());
+ let targets =
+ generate_targets(to_build, profiles, mode, filter, &all_features, release)?;
+ package_targets.push((to_build, targets));
+ },
};
for &(target, ref profile) in &general_targets {
build_config.doc_all = deps;
}
- ops::compile_targets(ws,
- &package_targets,
- &packages,
- &resolve_with_overrides,
- config,
- build_config,
- profiles,
- &exec)?
+ ops::compile_targets(
+ ws,
+ &package_targets,
+ &packages,
+ &resolve_with_overrides,
+ config,
+ build_config,
+ profiles,
+ &exec,
+ )?
};
ret.to_doc_test = to_builds.into_iter().cloned().collect();
return Ok(ret);
- fn resolve_all_features(resolve_with_overrides: &Resolve,
- package_id: &PackageId)
- -> HashSet<String> {
+ fn resolve_all_features(
+ resolve_with_overrides: &Resolve,
+ package_id: &PackageId,
+ ) -> HashSet<String> {
let mut features = resolve_with_overrides.features(package_id).clone();
// Include features enabled for use by dependencies so targets can also use them with the
fn matches(&self, target: &Target) -> bool {
match *self {
FilterRule::All => true,
- FilterRule::Just(ref targets) => {
- targets.iter().any(|x| *x == target.name())
- },
+ FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
}
}
}
impl CompileFilter {
- pub fn new(lib_only: bool,
- bins: Vec<String>, all_bins: bool,
- tsts: Vec<String>, all_tsts: bool,
- exms: Vec<String>, all_exms: bool,
- bens: Vec<String>, all_bens: bool,
- all_targets: bool) -> CompileFilter {
+ pub fn new(
+ lib_only: bool,
+ bins: Vec<String>,
+ all_bins: bool,
+ tsts: Vec<String>,
+ all_tsts: bool,
+ exms: Vec<String>,
+ all_exms: bool,
+ bens: Vec<String>,
+ all_bens: bool,
+ all_targets: bool,
+ ) -> CompileFilter {
let rule_bins = FilterRule::new(bins, all_bins);
let rule_tsts = FilterRule::new(tsts, all_tsts);
let rule_exms = FilterRule::new(exms, all_exms);
if all_targets {
CompileFilter::Only {
all_targets: true,
- lib: true, bins: FilterRule::All,
- examples: FilterRule::All, benches: FilterRule::All,
+ lib: true,
+ bins: FilterRule::All,
+ examples: FilterRule::All,
+ benches: FilterRule::All,
tests: FilterRule::All,
}
} else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific()
- || rule_exms.is_specific() || rule_bens.is_specific() {
+ || rule_exms.is_specific() || rule_bens.is_specific()
+ {
CompileFilter::Only {
all_targets: false,
- lib: lib_only, bins: rule_bins,
- examples: rule_exms, benches: rule_bens,
+ lib: lib_only,
+ bins: rule_bins,
+ examples: rule_exms,
+ benches: rule_bens,
tests: rule_tsts,
}
} else {
pub fn need_dev_deps(&self) -> bool {
match *self {
CompileFilter::Default { .. } => true,
- CompileFilter::Only { ref examples, ref tests, ref benches, .. } =>
- examples.is_specific() || tests.is_specific() || benches.is_specific()
+ CompileFilter::Only {
+ ref examples,
+ ref tests,
+ ref benches,
+ ..
+ } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
}
}
pub fn matches(&self, target: &Target) -> bool {
match *self {
CompileFilter::Default { .. } => true,
- CompileFilter::Only { lib, ref bins, ref examples, ref tests, ref benches, .. } => {
+ CompileFilter::Only {
+ lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ ..
+ } => {
let rule = match *target.kind() {
TargetKind::Bin => bins,
TargetKind::Test => tests,
TargetKind::Bench => benches,
- TargetKind::ExampleBin |
- TargetKind::ExampleLib(..) => examples,
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false,
};
required: bool,
}
-fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
- profile: &'a Profile,
- dep: &'a Profile,
- required_features_filterable: bool) -> Vec<BuildProposal<'a>> {
+fn generate_auto_targets<'a>(
+ mode: CompileMode,
+ targets: &'a [Target],
+ profile: &'a Profile,
+ dep: &'a Profile,
+ required_features_filterable: bool,
+) -> Vec<BuildProposal<'a>> {
match mode {
- CompileMode::Bench => {
- targets.iter().filter(|t| t.benched()).map(|t| {
- BuildProposal {
- target: t,
- profile,
- required: !required_features_filterable,
- }
- }).collect::<Vec<_>>()
- }
+ CompileMode::Bench => targets
+ .iter()
+ .filter(|t| t.benched())
+ .map(|t| BuildProposal {
+ target: t,
+ profile,
+ required: !required_features_filterable,
+ })
+ .collect::<Vec<_>>(),
CompileMode::Test => {
- let mut base = targets.iter().filter(|t| {
- t.tested()
- }).map(|t| {
- BuildProposal {
+ let mut base = targets
+ .iter()
+ .filter(|t| t.tested())
+ .map(|t| BuildProposal {
target: t,
- profile: if t.is_example() {dep} else {profile},
+ profile: if t.is_example() { dep } else { profile },
required: !required_features_filterable,
- }
- }).collect::<Vec<_>>();
+ })
+ .collect::<Vec<_>>();
// Always compile the library if we're testing everything as
// it'll be needed for doctests
}
base
}
- CompileMode::Build | CompileMode::Check{..} => {
- targets.iter().filter(|t| {
- t.is_bin() || t.is_lib()
- }).map(|t| BuildProposal {
+ CompileMode::Build | CompileMode::Check { .. } => targets
+ .iter()
+ .filter(|t| t.is_bin() || t.is_lib())
+ .map(|t| BuildProposal {
target: t,
profile,
required: !required_features_filterable,
- }).collect()
- }
- CompileMode::Doc { .. } => {
- targets.iter().filter(|t| {
- t.documented() && (
- !t.is_bin() ||
- !targets.iter().any(|l| l.is_lib() && l.name() == t.name())
- )
- }).map(|t| BuildProposal {
+ })
+ .collect(),
+ CompileMode::Doc { .. } => targets
+ .iter()
+ .filter(|t| {
+ t.documented()
+ && (!t.is_bin() || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
+ })
+ .map(|t| BuildProposal {
target: t,
profile,
required: !required_features_filterable,
- }).collect()
- }
+ })
+ .collect(),
CompileMode::Doctest => {
if let Some(t) = targets.iter().find(|t| t.is_lib()) {
if t.doctested() {
- return vec![BuildProposal {
- target: t,
- profile,
- required: !required_features_filterable,
- }];
+ return vec![
+ BuildProposal {
+ target: t,
+ profile,
+ required: !required_features_filterable,
+ },
+ ];
}
}
}
/// Given a filter rule and some context, propose a list of targets
-fn propose_indicated_targets<'a>(pkg: &'a Package,
- rule: &FilterRule,
- desc: &'static str,
- is_expected_kind: fn(&Target) -> bool,
- profile: &'a Profile) -> CargoResult<Vec<BuildProposal<'a>>> {
+fn propose_indicated_targets<'a>(
+ pkg: &'a Package,
+ rule: &FilterRule,
+ desc: &'static str,
+ is_expected_kind: fn(&Target) -> bool,
+ profile: &'a Profile,
+) -> CargoResult<Vec<BuildProposal<'a>>> {
match *rule {
FilterRule::All => {
- let result = pkg.targets().iter().filter(|t| is_expected_kind(t)).map(|t| {
- BuildProposal {
+ let result = pkg.targets()
+ .iter()
+ .filter(|t| is_expected_kind(t))
+ .map(|t| BuildProposal {
target: t,
profile,
required: false,
- }
- });
+ });
Ok(result.collect())
}
FilterRule::Just(ref names) => {
let mut targets = Vec::new();
for name in names {
- let target = pkg.targets().iter().find(|t| {
- t.name() == *name && is_expected_kind(t)
- });
+ let target = pkg.targets()
+ .iter()
+ .find(|t| t.name() == *name && is_expected_kind(t));
let t = match target {
Some(t) => t,
None => {
match suggestion {
Some(s) => {
let suggested_name = s.name();
- bail!("no {} target named `{}`\n\nDid you mean `{}`?",
- desc, name, suggested_name)
+ bail!(
+ "no {} target named `{}`\n\nDid you mean `{}`?",
+ desc,
+ name,
+ suggested_name
+ )
}
None => bail!("no {} target named `{}`", desc, name),
}
}
/// Collect the targets that are libraries or have all required features available.
-fn filter_compatible_targets<'a>(mut proposals: Vec<BuildProposal<'a>>,
- features: &HashSet<String>)
- -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
+fn filter_compatible_targets<'a>(
+ mut proposals: Vec<BuildProposal<'a>>,
+ features: &HashSet<String>,
+) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
let mut compatible = Vec::with_capacity(proposals.len());
for proposal in proposals.drain(..) {
let unavailable_features = match proposal.target.required_features() {
compatible.push((proposal.target, proposal.profile));
} else if proposal.required {
let required_features = proposal.target.required_features().unwrap();
- let quoted_required_features: Vec<String> = required_features.iter()
- .map(|s| format!("`{}`",s))
- .collect();
- bail!("target `{}` requires the features: {}\n\
- Consider enabling them by passing e.g. `--features=\"{}\"`",
- proposal.target.name(),
- quoted_required_features.join(", "),
- required_features.join(" "));
+ let quoted_required_features: Vec<String> = required_features
+ .iter()
+ .map(|s| format!("`{}`", s))
+ .collect();
+ bail!(
+ "target `{}` requires the features: {}\n\
+ Consider enabling them by passing e.g. `--features=\"{}\"`",
+ proposal.target.name(),
+ quoted_required_features.join(", "),
+ required_features.join(" ")
+ );
}
}
Ok(compatible)
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
-fn generate_targets<'a>(pkg: &'a Package,
- profiles: &'a Profiles,
- mode: CompileMode,
- filter: &CompileFilter,
- features: &HashSet<String>,
- release: bool)
- -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
- let build = if release {&profiles.release} else {&profiles.dev};
- let test = if release {&profiles.bench} else {&profiles.test};
+fn generate_targets<'a>(
+ pkg: &'a Package,
+ profiles: &'a Profiles,
+ mode: CompileMode,
+ filter: &CompileFilter,
+ features: &HashSet<String>,
+ release: bool,
+) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
+ let build = if release {
+ &profiles.release
+ } else {
+ &profiles.dev
+ };
+ let test = if release {
+ &profiles.bench
+ } else {
+ &profiles.test
+ };
let profile = match mode {
CompileMode::Test => test,
CompileMode::Bench => &profiles.bench,
CompileMode::Build => build,
- CompileMode::Check {test: false} => &profiles.check,
- CompileMode::Check {test: true} => &profiles.check_test,
+ CompileMode::Check { test: false } => &profiles.check,
+ CompileMode::Check { test: true } => &profiles.check_test,
CompileMode::Doc { .. } => &profiles.doc,
CompileMode::Doctest => &profiles.doctest,
};
};
let targets = match *filter {
- CompileFilter::Default { required_features_filterable } => {
+ CompileFilter::Default {
+ required_features_filterable,
+ } => {
let deps = if release {
&profiles.bench_deps
} else {
&profiles.test_deps
};
- generate_auto_targets(mode, pkg.targets(), profile, deps, required_features_filterable)
- }
- CompileFilter::Only { all_targets, lib, ref bins, ref examples, ref tests, ref benches } => {
+ generate_auto_targets(
+ mode,
+ pkg.targets(),
+ profile,
+ deps,
+ required_features_filterable,
+ )
+ }
+ CompileFilter::Only {
+ all_targets,
+ lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ } => {
let mut targets = Vec::new();
if lib {
}
}
targets.append(&mut propose_indicated_targets(
- pkg, bins, "bin", Target::is_bin, profile)?);
+ pkg,
+ bins,
+ "bin",
+ Target::is_bin,
+ profile,
+ )?);
targets.append(&mut propose_indicated_targets(
- pkg, examples, "example", Target::is_example, profile)?);
+ pkg,
+ examples,
+ "example",
+ Target::is_example,
+ profile,
+ )?);
// If --tests was specified, add all targets that would be
// generated by `cargo test`.
let test_filter = match *tests {
FilterRule::All => Target::tested,
- FilterRule::Just(_) => Target::is_test
+ FilterRule::Just(_) => Target::is_test,
};
targets.append(&mut propose_indicated_targets(
- pkg, tests, "test", test_filter, test_profile)?);
+ pkg,
+ tests,
+ "test",
+ test_filter,
+ test_profile,
+ )?);
// If --benches was specified, add all targets that would be
// generated by `cargo bench`.
let bench_filter = match *benches {
FilterRule::All => Target::benched,
- FilterRule::Just(_) => Target::is_bench
+ FilterRule::Just(_) => Target::is_bench,
};
targets.append(&mut propose_indicated_targets(
- pkg, benches, "bench", bench_filter, bench_profile)?);
+ pkg,
+ benches,
+ "bench",
+ bench_filter,
+ bench_profile,
+ )?);
targets
}
};
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
-fn scrape_build_config(config: &Config,
- jobs: Option<u32>,
- target: Option<String>)
- -> CargoResult<ops::BuildConfig> {
+fn scrape_build_config(
+ config: &Config,
+ jobs: Option<u32>,
+ target: Option<String>,
+) -> CargoResult<ops::BuildConfig> {
if jobs.is_some() && config.jobserver_from_env().is_some() {
- config.shell().warn("a `-j` argument was passed to Cargo but Cargo is \
- also configured with an external jobserver in \
- its environment, ignoring the `-j` parameter")?;
+ config.shell().warn(
+ "a `-j` argument was passed to Cargo but Cargo is \
+ also configured with an external jobserver in \
+ its environment, ignoring the `-j` parameter",
+ )?;
}
let cfg_jobs = match config.get_i64("build.jobs")? {
Some(v) => {
if v.val <= 0 {
- bail!("build.jobs must be positive, but found {} in {}",
- v.val, v.definition)
+ bail!(
+ "build.jobs must be positive, but found {} in {}",
+ v.val,
+ v.definition
+ )
} else if v.val >= i64::from(u32::max_value()) {
- bail!("build.jobs is too large: found {} in {}", v.val,
- v.definition)
+ bail!(
+ "build.jobs is too large: found {} in {}",
+ v.val,
+ v.definition
+ )
} else {
Some(v.val as u32)
}
Ok(base)
}
-fn scrape_target_config(config: &Config, triple: &str)
- -> CargoResult<ops::TargetConfig> {
-
+fn scrape_target_config(config: &Config, triple: &str) -> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
};
for (lib_name, value) in table {
match lib_name.as_str() {
- "ar" | "linker" | "runner" | "rustflags" => {
- continue
- },
+ "ar" | "linker" | "runner" | "rustflags" => continue,
_ => {}
}
// We require deterministic order of evaluation, so we must sort the pairs by key first.
let mut pairs = Vec::new();
for (k, value) in value.table(&lib_name)?.0 {
- pairs.push((k,value));
+ pairs.push((k, value));
}
- pairs.sort_by_key( |p| p.0 );
- for (k,value) in pairs{
+ pairs.sort_by_key(|p| p.0);
+ for (k, value) in pairs {
let key = format!("{}.{}", key, k);
match &k[..] {
"rustc-flags" => {
let (flags, definition) = value.string(k)?;
- let whence = format!("in `{}` (in {})", key,
- definition.display());
- let (paths, links) =
- BuildOutput::parse_rustc_flags(flags, &whence)
- ?;
+ let whence = format!("in `{}` (in {})", key, definition.display());
+ let (paths, links) = BuildOutput::parse_rustc_flags(flags, &whence)?;
output.library_paths.extend(paths);
output.library_links.extend(links);
}
"rustc-link-lib" => {
let list = value.list(k)?;
- output.library_links.extend(list.iter()
- .map(|v| v.0.clone()));
+ output
+ .library_links
+ .extend(list.iter().map(|v| v.0.clone()));
}
"rustc-link-search" => {
let list = value.list(k)?;
- output.library_paths.extend(list.iter().map(|v| {
- PathBuf::from(&v.0)
- }));
+ output
+ .library_paths
+ .extend(list.iter().map(|v| PathBuf::from(&v.0)));
}
"rustc-cfg" => {
let list = value.list(k)?;
output.cfgs.extend(list.iter().map(|v| v.0.clone()));
}
- "rustc-env" => {
- for (name, val) in value.table(k)?.0 {
- let val = val.string(name)?.0;
- output.env.push((name.clone(), val.to_string()));
- }
- }
- "warning" |
- "rerun-if-changed" |
- "rerun-if-env-changed" => {
+ "rustc-env" => for (name, val) in value.table(k)?.0 {
+ let val = val.string(name)?.0;
+ output.env.push((name.clone(), val.to_string()));
+ },
+ "warning" | "rerun-if-changed" | "rerun-if-env-changed" => {
bail!("`{}` is not supported in build script overrides", k);
}
_ => {
pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
let specs = options.compile_opts.spec.into_package_id_specs(ws)?;
- let resolve = ops::resolve_ws_precisely(ws,
- None,
- &options.compile_opts.features,
- options.compile_opts.all_features,
- options.compile_opts.no_default_features,
- &specs)?;
+ let resolve = ops::resolve_ws_precisely(
+ ws,
+ None,
+ &options.compile_opts.features,
+ options.compile_opts.all_features,
+ options.compile_opts.no_default_features,
+ &specs,
+ )?;
let (packages, resolve_with_overrides) = resolve;
- let pkgs = specs.iter().map(|p| {
- let pkgid = p.query(resolve_with_overrides.iter())?;
- packages.get(pkgid)
- }).collect::<CargoResult<Vec<_>>>()?;
+ let pkgs = specs
+ .iter()
+ .map(|p| {
+ let pkgid = p.query(resolve_with_overrides.iter())?;
+ packages.get(pkgid)
+ })
+ .collect::<CargoResult<Vec<_>>>()?;
let mut lib_names = HashMap::new();
let mut bin_names = HashMap::new();
for target in package.targets().iter().filter(|t| t.documented()) {
if target.is_lib() {
if let Some(prev) = lib_names.insert(target.crate_name(), package) {
- bail!("The library `{}` is specified by packages `{}` and \
- `{}` but can only be documented once. Consider renaming \
- or marking one of the targets as `doc = false`.",
- target.crate_name(), prev, package);
+ bail!(
+ "The library `{}` is specified by packages `{}` and \
+ `{}` but can only be documented once. Consider renaming \
+ or marking one of the targets as `doc = false`.",
+ target.crate_name(),
+ prev,
+ package
+ );
}
} else if let Some(prev) = bin_names.insert(target.crate_name(), package) {
- bail!("The binary `{}` is specified by packages `{}` and \
- `{}` but can be documented only once. Consider renaming \
- or marking one of the targets as `doc = false`.",
- target.crate_name(), prev, package);
+ bail!(
+ "The binary `{}` is specified by packages `{}` and \
+ `{}` but can be documented only once. Consider renaming \
+ or marking one of the targets as `doc = false`.",
+ target.crate_name(),
+ prev,
+ package
+ );
}
}
}
if options.open_result {
let name = if pkgs.len() > 1 {
- bail!("Passing multiple packages and `open` is not supported.\n\
- Please re-run this command with `-p <spec>` where `<spec>` \
- is one of the following:\n {}",
- pkgs.iter().map(|p| p.name().to_inner()).collect::<Vec<_>>().join("\n "));
+ bail!(
+ "Passing multiple packages and `open` is not supported.\n\
+ Please re-run this command with `-p <spec>` where `<spec>` \
+ is one of the following:\n {}",
+ pkgs.iter()
+ .map(|p| p.name().to_inner())
+ .collect::<Vec<_>>()
+ .join("\n ")
+ );
} else if pkgs.len() == 1 {
pkgs[0].name().replace("-", "_")
} else {
match open_docs(&path) {
Ok(m) => shell.status("Launching", m)?,
Err(e) => {
- shell.warn(
- "warning: could not determine a browser to open docs with, tried:")?;
+ shell.warn("warning: could not determine a browser to open docs with, tried:")?;
for method in e {
shell.warn(format!("\t{}", method))?;
}
-use core::{Resolve, PackageSet, Workspace};
+use core::{PackageSet, Resolve, Workspace};
use ops;
use util::CargoResult;
pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
let mut registry = PackageRegistry::new(ws.config())?;
- let resolve = ops::resolve_with_previous(&mut registry,
- ws,
- Method::Everything,
- None,
- None,
- &[],
- true,
- true)?;
+ let resolve = ops::resolve_with_previous(
+ &mut registry,
+ ws,
+ Method::Everything,
+ None,
+ None,
+ &[],
+ true,
+ true,
+ )?;
ops::write_pkg_lockfile(ws, &resolve)?;
Ok(())
}
-pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
- -> CargoResult<()> {
-
+pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> {
if opts.aggressive && opts.precise.is_some() {
bail!("cannot specify both aggressive and precise simultaneously")
}
for name in opts.to_update.iter() {
let dep = previous_resolve.query(name)?;
if opts.aggressive {
- fill_with_deps(&previous_resolve, dep, &mut to_avoid,
- &mut HashSet::new());
+ fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new());
} else {
to_avoid.insert(dep);
sources.push(match opts.precise {
};
dep.source_id().clone().with_precise(Some(precise))
}
- None => {
- dep.source_id().clone().with_precise(None)
- }
+ None => dep.source_id().clone().with_precise(None),
});
}
}
registry.add_sources(&sources)?;
}
- let resolve = ops::resolve_with_previous(&mut registry,
- ws,
- Method::Everything,
- Some(&previous_resolve),
- Some(&to_avoid),
- &[],
- true,
- true)?;
+ let resolve = ops::resolve_with_previous(
+ &mut registry,
+ ws,
+ Method::Everything,
+ Some(&previous_resolve),
+ Some(&to_avoid),
+ &[],
+ true,
+ true,
+ )?;
// Summarize what is changing for the user.
let print_change = |status: &str, msg: String, color: Color| {
for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) {
if removed.len() == 1 && added.len() == 1 {
let msg = if removed[0].source_id().is_git() {
- format!("{} -> #{}", removed[0],
- &added[0].source_id().precise().unwrap()[..8])
+ format!(
+ "{} -> #{}",
+ removed[0],
+ &added[0].source_id().precise().unwrap()[..8]
+ )
} else {
format!("{} -> v{}", removed[0], added[0].version())
};
ops::write_pkg_lockfile(ws, &resolve)?;
return Ok(());
- fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
- set: &mut HashSet<&'a PackageId>,
- visited: &mut HashSet<&'a PackageId>) {
+ fn fill_with_deps<'a>(
+ resolve: &'a Resolve,
+ dep: &'a PackageId,
+ set: &mut HashSet<&'a PackageId>,
+ visited: &mut HashSet<&'a PackageId>,
+ ) {
if !visited.insert(dep) {
- return
+ return;
}
set.insert(dep);
for dep in resolve.deps(dep) {
}
}
- fn compare_dependency_graphs<'a>(previous_resolve: &'a Resolve,
- resolve: &'a Resolve) ->
- Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
+ fn compare_dependency_graphs<'a>(
+ previous_resolve: &'a Resolve,
+ resolve: &'a Resolve,
+ ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
fn key(dep: &PackageId) -> (&str, &SourceId) {
(dep.name().to_inner(), dep.source_id())
}
// more complicated because the equality for source ids does not take
// precise versions into account (e.g. git shas), but we want to take
// that into account here.
- fn vec_subtract<'a>(a: &[&'a PackageId],
- b: &[&'a PackageId]) -> Vec<&'a PackageId> {
- a.iter().filter(|a| {
- // If this package id is not found in `b`, then it's definitely
- // in the subtracted set
- let i = match b.binary_search(a) {
- Ok(i) => i,
- Err(..) => return true,
- };
-
- // If we've found `a` in `b`, then we iterate over all instances
- // (we know `b` is sorted) and see if they all have different
- // precise versions. If so, then `a` isn't actually in `b` so
- // we'll let it through.
- //
- // Note that we only check this for non-registry sources,
- // however, as registries contain enough version information in
- // the package id to disambiguate
- if a.source_id().is_registry() {
- return false
- }
- b[i..].iter().take_while(|b| a == b).all(|b| {
- a.source_id().precise() != b.source_id().precise()
+ fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> {
+ a.iter()
+ .filter(|a| {
+ // If this package id is not found in `b`, then it's definitely
+ // in the subtracted set
+ let i = match b.binary_search(a) {
+ Ok(i) => i,
+ Err(..) => return true,
+ };
+
+ // If we've found `a` in `b`, then we iterate over all instances
+ // (we know `b` is sorted) and see if they all have different
+ // precise versions. If so, then `a` isn't actually in `b` so
+ // we'll let it through.
+ //
+ // Note that we only check this for non-registry sources,
+ // however, as registries contain enough version information in
+ // the package id to disambiguate
+ if a.source_id().is_registry() {
+ return false;
+ }
+ b[i..]
+ .iter()
+ .take_while(|b| a == b)
+ .all(|b| a.source_id().precise() != b.source_id().precise())
})
- }).cloned().collect()
+ .cloned()
+ .collect()
}
// Map (package name, package source) to (removed versions, added versions).
let mut changes = BTreeMap::new();
let empty = (Vec::new(), Vec::new());
for dep in previous_resolve.iter() {
- changes.entry(key(dep)).or_insert_with(||empty.clone()).0.push(dep);
+ changes
+ .entry(key(dep))
+ .or_insert_with(|| empty.clone())
+ .0
+ .push(dep);
}
for dep in resolve.iter() {
- changes.entry(key(dep)).or_insert_with(||empty.clone()).1.push(dep);
+ changes
+ .entry(key(dep))
+ .or_insert_with(|| empty.clone())
+ .1
+ .push(dep);
}
for v in changes.values_mut() {
use tempdir::TempDir;
use toml;
-use core::{SourceId, Source, Package, Dependency, PackageIdSpec};
+use core::{Dependency, Package, PackageIdSpec, Source, SourceId};
use core::{PackageId, Workspace};
use ops::{self, CompileFilter, DefaultExecutor};
use sources::{GitSource, PathSource, SourceConfigMap};
-use util::{Config, internal};
-use util::{Filesystem, FileLock};
+use util::{internal, Config};
+use util::{FileLock, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
}
}
-pub fn install(root: Option<&str>,
- krates: Vec<&str>,
- source_id: &SourceId,
- vers: Option<&str>,
- opts: &ops::CompileOptions,
- force: bool) -> CargoResult<()> {
+pub fn install(
+ root: Option<&str>,
+ krates: Vec<&str>,
+ source_id: &SourceId,
+ vers: Option<&str>,
+ opts: &ops::CompileOptions,
+ force: bool,
+) -> CargoResult<()> {
let root = resolve_root(root, opts.config)?;
let map = SourceConfigMap::new(opts.config)?;
let (installed_anything, scheduled_error) = if krates.len() <= 1 {
- install_one(&root, &map, krates.into_iter().next(), source_id, vers, opts,
- force, true)?;
+ install_one(
+ &root,
+ &map,
+ krates.into_iter().next(),
+ source_id,
+ vers,
+ opts,
+ force,
+ true,
+ )?;
(true, false)
} else {
let mut succeeded = vec![];
for krate in krates {
let root = root.clone();
let map = map.clone();
- match install_one(&root, &map, Some(krate), source_id, vers,
- opts, force, first) {
+ match install_one(
+ &root,
+ &map,
+ Some(krate),
+ source_id,
+ vers,
+ opts,
+ force,
+ first,
+ ) {
Ok(()) => succeeded.push(krate),
Err(e) => {
::handle_error(e, &mut opts.config.shell());
summary.push(format!("Successfully installed {}!", succeeded.join(", ")));
}
if !failed.is_empty() {
- summary.push(format!("Failed to install {} (see error(s) above).", failed.join(", ")));
+ summary.push(format!(
+ "Failed to install {} (see error(s) above).",
+ failed.join(", ")
+ ));
}
if !succeeded.is_empty() || !failed.is_empty() {
opts.config.shell().status("Summary", summary.join(" "))?;
let path = env::var_os("PATH").unwrap_or_default();
for path in env::split_paths(&path) {
if path == dst {
- return Ok(())
+ return Ok(());
}
}
- opts.config.shell().warn(&format!("be sure to add `{}` to your PATH to be \
- able to run the installed binaries",
- dst.display()))?;
+ opts.config.shell().warn(&format!(
+ "be sure to add `{}` to your PATH to be \
+ able to run the installed binaries",
+ dst.display()
+ ))?;
}
if scheduled_error {
Ok(())
}
-fn install_one(root: &Filesystem,
- map: &SourceConfigMap,
- krate: Option<&str>,
- source_id: &SourceId,
- vers: Option<&str>,
- opts: &ops::CompileOptions,
- force: bool,
- is_first_install: bool) -> CargoResult<()> {
-
+fn install_one(
+ root: &Filesystem,
+ map: &SourceConfigMap,
+ krate: Option<&str>,
+ source_id: &SourceId,
+ vers: Option<&str>,
+ opts: &ops::CompileOptions,
+ force: bool,
+ is_first_install: bool,
+) -> CargoResult<()> {
let config = opts.config;
let (pkg, source) = if source_id.is_git() {
- select_pkg(GitSource::new(source_id, config)?,
- krate, vers, config, is_first_install,
- &mut |git| git.read_packages())?
+ select_pkg(
+ GitSource::new(source_id, config)?,
+ krate,
+ vers,
+ config,
+ is_first_install,
+ &mut |git| git.read_packages(),
+ )?
} else if source_id.is_path() {
- let path = source_id.url().to_file_path().map_err(|()| {
- format_err!("path sources must have a valid path")
- })?;
+ let path = source_id
+ .url()
+ .to_file_path()
+ .map_err(|()| format_err!("path sources must have a valid path"))?;
let mut src = PathSource::new(&path, source_id, config);
src.update().chain_err(|| {
- format_err!("`{}` is not a crate root; specify a crate to \
- install from crates.io, or use --path or --git to \
- specify an alternate source", path.display())
+ format_err!(
+ "`{}` is not a crate root; specify a crate to \
+ install from crates.io, or use --path or --git to \
+ specify an alternate source",
+ path.display()
+ )
})?;
- select_pkg(PathSource::new(&path, source_id, config),
- krate, vers, config, is_first_install,
- &mut |path| path.read_packages())?
+ select_pkg(
+ PathSource::new(&path, source_id, config),
+ krate,
+ vers,
+ config,
+ is_first_install,
+ &mut |path| path.read_packages(),
+ )?
} else {
- select_pkg(map.load(source_id)?,
- krate, vers, config, is_first_install,
- &mut |_| {
- bail!("must specify a crate to install from \
- crates.io, or use --path or --git to \
- specify alternate source")
- })?
+ select_pkg(
+ map.load(source_id)?,
+ krate,
+ vers,
+ config,
+ is_first_install,
+ &mut |_| {
+ bail!(
+ "must specify a crate to install from \
+ crates.io, or use --path or --git to \
+ specify alternate source"
+ )
+ },
+ )?
};
let mut td_opt = None;
check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
}
- let compile = ops::compile_ws(&ws,
- Some(source),
- opts,
- Arc::new(DefaultExecutor)).chain_err(|| {
- if let Some(td) = td_opt.take() {
- // preserve the temporary directory, so the user can inspect it
- td.into_path();
- }
+ let compile =
+ ops::compile_ws(&ws, Some(source), opts, Arc::new(DefaultExecutor)).chain_err(|| {
+ if let Some(td) = td_opt.take() {
+ // preserve the temporary directory, so the user can inspect it
+ td.into_path();
+ }
- format_err!("failed to compile `{}`, intermediate artifacts can be \
- found at `{}`", pkg, ws.target_dir().display())
- })?;
- let binaries: Vec<(&str, &Path)> = compile.binaries.iter().map(|bin| {
- let name = bin.file_name().unwrap();
- if let Some(s) = name.to_str() {
- Ok((s, bin.as_ref()))
- } else {
- bail!("Binary `{:?}` name can't be serialized into string", name)
- }
- }).collect::<CargoResult<_>>()?;
+ format_err!(
+ "failed to compile `{}`, intermediate artifacts can be \
+ found at `{}`",
+ pkg,
+ ws.target_dir().display()
+ )
+ })?;
+ let binaries: Vec<(&str, &Path)> = compile
+ .binaries
+ .iter()
+ .map(|bin| {
+ let name = bin.file_name().unwrap();
+ if let Some(s) = name.to_str() {
+ Ok((s, bin.as_ref()))
+ } else {
+ bail!("Binary `{:?}` name can't be serialized into string", name)
+ }
+ })
+ .collect::<CargoResult<_>>()?;
if binaries.is_empty() {
- bail!("no binaries are available for install using the selected \
- features");
+ bail!(
+ "no binaries are available for install using the selected \
+ features"
+ );
}
let metadata = metadata(config, root)?;
let mut list = read_crate_list(&metadata)?;
let dst = metadata.parent().join("bin");
- let duplicates = check_overwrites(&dst, pkg, &opts.filter,
- &list, force)?;
+ let duplicates = check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
fs::create_dir_all(&dst)?;
let dst = staging_dir.path().join(bin);
// Try to move if `target_dir` is transient.
if !source_id.is_path() && fs::rename(src, &dst).is_ok() {
- continue
+ continue;
}
fs::copy(src, &dst).chain_err(|| {
- format_err!("failed to copy `{}` to `{}`", src.display(),
- dst.display())
+ format_err!("failed to copy `{}` to `{}`", src.display(), dst.display())
})?;
}
- let (to_replace, to_install): (Vec<&str>, Vec<&str>) =
- binaries.iter().map(|&(bin, _)| bin)
- .partition(|&bin| duplicates.contains_key(bin));
+ let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries
+ .iter()
+ .map(|&(bin, _)| bin)
+ .partition(|&bin| duplicates.contains_key(bin));
let mut installed = Transaction { bins: Vec::new() };
let dst = dst.join(bin);
config.shell().status("Installing", dst.display())?;
fs::rename(&src, &dst).chain_err(|| {
- format_err!("failed to move `{}` to `{}`", src.display(),
- dst.display())
+ format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
})?;
installed.bins.push(dst);
}
let dst = dst.join(bin);
config.shell().status("Replacing", dst.display())?;
fs::rename(&src, &dst).chain_err(|| {
- format_err!("failed to move `{}` to `{}`", src.display(),
- dst.display())
+ format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
})?;
replaced_names.push(bin);
}
set.remove(bin);
}
}
- list.v1.entry(pkg.package_id().clone())
- .or_insert_with(BTreeSet::new)
- .insert(bin.to_string());
+ list.v1
+ .entry(pkg.package_id().clone())
+ .or_insert_with(BTreeSet::new)
+ .insert(bin.to_string());
}
// Remove empty metadata lines.
- let pkgs = list.v1.iter()
- .filter_map(|(p, set)| if set.is_empty() { Some(p.clone()) } else { None })
- .collect::<Vec<_>>();
+ let pkgs = list.v1
+ .iter()
+ .filter_map(|(p, set)| {
+ if set.is_empty() {
+ Some(p.clone())
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
for p in pkgs.iter() {
list.v1.remove(p);
}
// If installation was successful record newly installed binaries.
if result.is_ok() {
- list.v1.entry(pkg.package_id().clone())
- .or_insert_with(BTreeSet::new)
- .extend(to_install.iter().map(|s| s.to_string()));
+ list.v1
+ .entry(pkg.package_id().clone())
+ .or_insert_with(BTreeSet::new)
+ .extend(to_install.iter().map(|s| s.to_string()));
}
let write_result = write_crate_list(&metadata, list);
Ok(())
}
-fn select_pkg<'a, T>(mut source: T,
- name: Option<&str>,
- vers: Option<&str>,
- config: &Config,
- needs_update: bool,
- list_all: &mut FnMut(&mut T) -> CargoResult<Vec<Package>>)
- -> CargoResult<(Package, Box<Source + 'a>)>
- where T: Source + 'a
+fn select_pkg<'a, T>(
+ mut source: T,
+ name: Option<&str>,
+ vers: Option<&str>,
+ config: &Config,
+ needs_update: bool,
+ list_all: &mut FnMut(&mut T) -> CargoResult<Vec<Package>>,
+) -> CargoResult<(Package, Box<Source + 'a>)>
+where
+ T: Source + 'a,
{
if needs_update {
source.update()?;
Some(name) => {
let vers = match vers {
Some(v) => {
-
// If the version begins with character <, >, =, ^, ~ parse it as a
// version range, otherwise parse it as a specific version
let first = v.chars()
.nth(0)
- .ok_or_else(||format_err!("no version provided for the `--vers` flag"))?;
+ .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;
match first {
'<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {
Ok(v) => Some(v.to_string()),
- Err(_) => {
- bail!("the `--vers` provided, `{}`, is \
+ Err(_) => bail!(
+ "the `--vers` provided, `{}`, is \
not a valid semver version requirement\n\n
Please have a look at \
http://doc.crates.io/specifying-dependencies.html \
- for the correct format", v)
- }
+ for the correct format",
+ v
+ ),
},
_ => match v.parse::<Version>() {
Ok(v) => Some(format!("={}", v)),
Err(_) => {
- let mut msg = format!("\
- the `--vers` provided, `{}`, is \
- not a valid semver version\n\n\
- historically Cargo treated this \
- as a semver version requirement \
- accidentally\nand will continue \
- to do so, but this behavior \
- will be removed eventually", v
+ let mut msg = format!(
+ "\
+ the `--vers` provided, `{}`, is \
+ not a valid semver version\n\n\
+ historically Cargo treated this \
+ as a semver version requirement \
+ accidentally\nand will continue \
+ to do so, but this behavior \
+ will be removed eventually",
+ v
);
// If it is not a valid version but it is a valid version
// requirement, add a note to the warning
if v.parse::<VersionReq>().is_ok() {
- msg.push_str(&format!("\nif you want to specify semver range, \
- add an explicit qualifier, like ^{}", v));
+ msg.push_str(&format!(
+ "\nif you want to specify semver range, \
+ add an explicit qualifier, like ^{}",
+ v
+ ));
}
config.shell().warn(&msg)?;
Some(v.to_string())
}
- }
+ },
}
}
None => None,
}
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
- .unwrap_or_default();
- Err(format_err!("could not find `{}` in {}{}", name,
- source.source_id(), vers_info))
+ .unwrap_or_default();
+ Err(format_err!(
+ "could not find `{}` in {}{}",
+ name,
+ source.source_id(),
+ vers_info
+ ))
}
}
}
None => {
let candidates = list_all(&mut source)?;
- let binaries = candidates.iter().filter(|cand| {
- cand.targets().iter().filter(|t| t.is_bin()).count() > 0
- });
- let examples = candidates.iter().filter(|cand| {
- cand.targets().iter().filter(|t| t.is_example()).count() > 0
- });
+ let binaries = candidates
+ .iter()
+ .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);
+ let examples = candidates
+ .iter()
+ .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);
let pkg = match one(binaries, |v| multi_err("binaries", v))? {
Some(p) => p,
- None => {
- match one(examples, |v| multi_err("examples", v))? {
- Some(p) => p,
- None => bail!("no packages found with binaries or \
- examples"),
- }
- }
+ None => match one(examples, |v| multi_err("examples", v))? {
+ Some(p) => p,
+ None => bail!(
+ "no packages found with binaries or \
+ examples"
+ ),
+ },
};
return Ok((pkg.clone(), Box::new(source)));
fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {
pkgs.sort_by(|a, b| a.name().cmp(&b.name()));
- format!("multiple packages with {} found: {}", kind,
- pkgs.iter().map(|p| p.name().to_inner()).collect::<Vec<_>>()
- .join(", "))
+ format!(
+ "multiple packages with {} found: {}",
+ kind,
+ pkgs.iter()
+ .map(|p| p.name().to_inner())
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
}
}
}
}
fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
- where I: Iterator,
- F: FnOnce(Vec<I::Item>) -> String
+where
+ I: Iterator,
+ F: FnOnce(Vec<I::Item>) -> String,
{
match (i.next(), i.next()) {
(Some(i1), Some(i2)) => {
Err(format_err!("{}", f(v)))
}
(Some(i), None) => Ok(Some(i)),
- (None, _) => Ok(None)
+ (None, _) => Ok(None),
}
}
-fn check_overwrites(dst: &Path,
- pkg: &Package,
- filter: &ops::CompileFilter,
- prev: &CrateListingV1,
- force: bool) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
+fn check_overwrites(
+ dst: &Path,
+ pkg: &Package,
+ filter: &ops::CompileFilter,
+ prev: &CrateListingV1,
+ force: bool,
+) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
// If explicit --bin or --example flags were passed then those'll
// get checked during cargo_compile, we only care about the "build
// everything" case here
}
let duplicates = find_duplicates(dst, pkg, filter, prev);
if force || duplicates.is_empty() {
- return Ok(duplicates)
+ return Ok(duplicates);
}
// Format the error message.
let mut msg = String::new();
Err(format_err!("{}", msg))
}
-fn find_duplicates(dst: &Path,
- pkg: &Package,
- filter: &ops::CompileFilter,
- prev: &CrateListingV1) -> BTreeMap<String, Option<PackageId>> {
+fn find_duplicates(
+ dst: &Path,
+ pkg: &Package,
+ filter: &ops::CompileFilter,
+ prev: &CrateListingV1,
+) -> BTreeMap<String, Option<PackageId>> {
let check = |name: String| {
// Need to provide type, works around Rust Issue #93349
let name = format!("{}{}", name, env::consts::EXE_SUFFIX);
}
};
match *filter {
- CompileFilter::Default { .. } => {
- pkg.targets().iter()
- .filter(|t| t.is_bin())
- .filter_map(|t| check(t.name().to_string()))
- .collect()
- }
- CompileFilter::Only { ref bins, ref examples, .. } => {
+ CompileFilter::Default { .. } => pkg.targets()
+ .iter()
+ .filter(|t| t.is_bin())
+ .filter_map(|t| check(t.name().to_string()))
+ .collect(),
+ CompileFilter::Only {
+ ref bins,
+ ref examples,
+ ..
+ } => {
let all_bins: Vec<String> = bins.try_collect().unwrap_or_else(|| {
- pkg.targets().iter().filter(|t| t.is_bin())
- .map(|t| t.name().to_string())
- .collect()
+ pkg.targets()
+ .iter()
+ .filter(|t| t.is_bin())
+ .map(|t| t.name().to_string())
+ .collect()
});
let all_examples: Vec<String> = examples.try_collect().unwrap_or_else(|| {
- pkg.targets().iter().filter(|t| t.is_bin_example())
- .map(|t| t.name().to_string())
- .collect()
+ pkg.targets()
+ .iter()
+ .filter(|t| t.is_bin_example())
+ .map(|t| t.name().to_string())
+ .collect()
});
- all_bins.iter().chain(all_examples.iter())
- .filter_map(|t| check(t.clone()))
- .collect::<BTreeMap<String, Option<PackageId>>>()
+ all_bins
+ .iter()
+ .chain(all_examples.iter())
+ .filter_map(|t| check(t.clone()))
+ .collect::<BTreeMap<String, Option<PackageId>>>()
}
}
}
let listing = (|| -> CargoResult<_> {
let mut contents = String::new();
file.file().read_to_string(&mut contents)?;
- let listing = toml::from_str(&contents).chain_err(|| {
- internal("invalid TOML found for metadata")
- })?;
+ let listing =
+ toml::from_str(&contents).chain_err(|| internal("invalid TOML found for metadata"))?;
match listing {
CrateListing::V1(v1) => Ok(v1),
- CrateListing::Empty(_) => {
- Ok(CrateListingV1 { v1: BTreeMap::new() })
- }
+ CrateListing::Empty(_) => Ok(CrateListingV1 {
+ v1: BTreeMap::new(),
+ }),
}
- })().chain_err(|| {
- format_err!("failed to parse crate metadata at `{}`",
- file.path().to_string_lossy())
+ })()
+ .chain_err(|| {
+ format_err!(
+ "failed to parse crate metadata at `{}`",
+ file.path().to_string_lossy()
+ )
})?;
Ok(listing)
}
let data = toml::to_string(&CrateListing::V1(listing))?;
file.write_all(data.as_bytes())?;
Ok(())
- })().chain_err(|| {
- format_err!("failed to write crate metadata at `{}`",
- file.path().to_string_lossy())
+ })()
+ .chain_err(|| {
+ format_err!(
+ "failed to write crate metadata at `{}`",
+ file.path().to_string_lossy()
+ )
})?;
Ok(())
}
Ok(())
}
-pub fn uninstall(root: Option<&str>,
- specs: Vec<&str>,
- bins: &[String],
- config: &Config) -> CargoResult<()> {
+pub fn uninstall(
+ root: Option<&str>,
+ specs: Vec<&str>,
+ bins: &[String],
+ config: &Config,
+) -> CargoResult<()> {
if specs.len() > 1 && !bins.is_empty() {
bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.");
}
let mut summary = vec![];
if !succeeded.is_empty() {
- summary.push(format!("Successfully uninstalled {}!", succeeded.join(", ")));
+ summary.push(format!(
+ "Successfully uninstalled {}!",
+ succeeded.join(", ")
+ ));
}
if !failed.is_empty() {
- summary.push(format!("Failed to uninstall {} (see error(s) above).", failed.join(", ")));
+ summary.push(format!(
+ "Failed to uninstall {} (see error(s) above).",
+ failed.join(", ")
+ ));
}
if !succeeded.is_empty() || !failed.is_empty() {
Ok(())
}
-pub fn uninstall_one(root: &Filesystem,
- spec: &str,
- bins: &[String],
- config: &Config) -> CargoResult<()> {
+pub fn uninstall_one(
+ root: &Filesystem,
+ spec: &str,
+ bins: &[String],
+ config: &Config,
+) -> CargoResult<()> {
let crate_metadata = metadata(config, root)?;
let mut metadata = read_crate_list(&crate_metadata)?;
let mut to_remove = Vec::new();
{
- let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?
- .clone();
+ let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone();
let mut installed = match metadata.v1.entry(result.clone()) {
Entry::Occupied(e) => e,
Entry::Vacant(..) => panic!("entry not found: {}", result),
for bin in installed.get() {
let bin = dst.join(bin);
if fs::metadata(&bin).is_err() {
- bail!("corrupt metadata, `{}` does not exist when it should",
- bin.display())
+ bail!(
+ "corrupt metadata, `{}` does not exist when it should",
+ bin.display()
+ )
}
}
- let bins = bins.iter().map(|s| {
- if s.ends_with(env::consts::EXE_SUFFIX) {
- s.to_string()
- } else {
- format!("{}{}", s, env::consts::EXE_SUFFIX)
- }
- }).collect::<Vec<_>>();
+ let bins = bins.iter()
+ .map(|s| {
+ if s.ends_with(env::consts::EXE_SUFFIX) {
+ s.to_string()
+ } else {
+ format!("{}{}", s, env::consts::EXE_SUFFIX)
+ }
+ })
+ .collect::<Vec<_>>();
for bin in bins.iter() {
if !installed.get().contains(bin) {
root.open_rw(Path::new(".crates.toml"), config, "crate metadata")
}
-fn resolve_root(flag: Option<&str>,
- config: &Config) -> CargoResult<Filesystem> {
+fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
let config_root = config.get_path("install.root")?;
- Ok(flag.map(PathBuf::from).or_else(|| {
- env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)
- }).or_else(move || {
- config_root.map(|v| v.val)
- }).map(Filesystem::new).unwrap_or_else(|| {
- config.home().clone()
- }))
+ Ok(flag.map(PathBuf::from)
+ .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
+ .or_else(move || config_root.map(|v| v.val))
+ .map(Filesystem::new)
+ .unwrap_or_else(|| config.home().clone()))
}
use core::Workspace;
use ops::is_bad_artifact_name;
-use util::{GitRepo, HgRepo, PijulRepo, FossilRepo, internal};
-use util::{Config, paths};
+use util::{internal, FossilRepo, GitRepo, HgRepo, PijulRepo};
+use util::{paths, Config};
use util::errors::{CargoResult, CargoResultExt};
use toml;
#[derive(Clone, Copy, Debug, PartialEq)]
-pub enum VersionControl { Git, Hg, Pijul, Fossil, NoVcs }
+pub enum VersionControl {
+ Git,
+ Hg,
+ Pijul,
+ Fossil,
+ NoVcs,
+}
#[derive(Debug)]
pub struct NewOptions {
}
impl NewOptions {
- pub fn new(version_control: Option<VersionControl>,
- bin: bool,
- lib: bool,
- path: String,
- name: Option<String>) -> CargoResult<NewOptions> {
-
+ pub fn new(
+ version_control: Option<VersionControl>,
+ bin: bool,
+ lib: bool,
+ path: String,
+ name: Option<String>,
+ ) -> CargoResult<NewOptions> {
let kind = match (bin, lib) {
(true, true) => bail!("can't specify both lib and binary outputs"),
(false, true) => NewProjectKind::Lib,
(_, false) => NewProjectKind::Bin,
};
- let opts = NewOptions { version_control, kind, path, name };
+ let opts = NewOptions {
+ version_control,
+ kind,
+ path,
+ name,
+ };
Ok(opts)
}
}
}
let file_name = path.file_name().ok_or_else(|| {
- format_err!("cannot auto-detect project name from path {:?} ; use --name to override", path.as_os_str())
+ format_err!(
+ "cannot auto-detect project name from path {:?} ; use --name to override",
+ path.as_os_str()
+ )
})?;
file_name.to_str().ok_or_else(|| {
- format_err!("cannot create project with a non-unicode name: {:?}", file_name)
+ format_err!(
+ "cannot create project with a non-unicode name: {:?}",
+ file_name
+ )
})
}
fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {
-
// If --name is already used to override, no point in suggesting it
// again as a fix.
let name_help = match opts.name {
// Ban keywords + test list found at
// https://doc.rust-lang.org/grammar.html#keywords
- let blacklist = ["abstract", "alignof", "as", "become", "box",
- "break", "const", "continue", "crate", "do",
- "else", "enum", "extern", "false", "final",
- "fn", "for", "if", "impl", "in",
- "let", "loop", "macro", "match", "mod",
- "move", "mut", "offsetof", "override", "priv",
- "proc", "pub", "pure", "ref", "return",
- "self", "sizeof", "static", "struct",
- "super", "test", "trait", "true", "type", "typeof",
- "unsafe", "unsized", "use", "virtual", "where",
- "while", "yield"];
+ let blacklist = [
+ "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
+ "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
+ "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub",
+ "pure", "ref", "return", "self", "sizeof", "static", "struct", "super", "test", "trait",
+ "true", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
+ ];
if blacklist.contains(&name) || (opts.kind.is_bin() && is_bad_artifact_name(name)) {
- bail!("The name `{}` cannot be used as a crate name{}",
+ bail!(
+ "The name `{}` cannot be used as a crate name{}",
name,
- name_help)
+ name_help
+ )
}
if let Some(ref c) = name.chars().nth(0) {
if c.is_digit(10) {
- bail!("Package names starting with a digit cannot be used as a crate name{}",
- name_help)
+ bail!(
+ "Package names starting with a digit cannot be used as a crate name{}",
+ name_help
+ )
}
}
for c in name.chars() {
- if c.is_alphanumeric() { continue }
- if c == '_' || c == '-' { continue }
- bail!("Invalid character `{}` in crate name: `{}`{}",
+ if c.is_alphanumeric() {
+ continue;
+ }
+ if c == '_' || c == '-' {
+ continue;
+ }
+ bail!(
+ "Invalid character `{}` in crate name: `{}`{}",
c,
name,
- name_help)
+ name_help
+ )
}
Ok(())
}
-fn detect_source_paths_and_types(project_path : &Path,
- project_name: &str,
- detected_files: &mut Vec<SourceFileInformation>,
- ) -> CargoResult<()> {
+fn detect_source_paths_and_types(
+ project_path: &Path,
+ project_name: &str,
+ detected_files: &mut Vec<SourceFileInformation>,
+) -> CargoResult<()> {
let path = project_path;
let name = project_name;
}
let tests = vec![
- Test { proposed_path: format!("src/main.rs"), handling: H::Bin },
- Test { proposed_path: format!("main.rs"), handling: H::Bin },
- Test { proposed_path: format!("src/{}.rs", name), handling: H::Detect },
- Test { proposed_path: format!("{}.rs", name), handling: H::Detect },
- Test { proposed_path: format!("src/lib.rs"), handling: H::Lib },
- Test { proposed_path: format!("lib.rs"), handling: H::Lib },
+ Test {
+ proposed_path: format!("src/main.rs"),
+ handling: H::Bin,
+ },
+ Test {
+ proposed_path: format!("main.rs"),
+ handling: H::Bin,
+ },
+ Test {
+ proposed_path: format!("src/{}.rs", name),
+ handling: H::Detect,
+ },
+ Test {
+ proposed_path: format!("{}.rs", name),
+ handling: H::Detect,
+ },
+ Test {
+ proposed_path: format!("src/lib.rs"),
+ handling: H::Lib,
+ },
+ Test {
+ proposed_path: format!("lib.rs"),
+ handling: H::Lib,
+ },
];
for i in tests {
let pp = i.proposed_path;
// path/pp does not exist or is not a file
- if !fs::metadata(&path.join(&pp)).map(|x| x.is_file()).unwrap_or(false) {
+ if !fs::metadata(&path.join(&pp))
+ .map(|x| x.is_file())
+ .unwrap_or(false)
+ {
continue;
}
let sfi = match i.handling {
- H::Bin => {
- SourceFileInformation {
- relative_path: pp,
- target_name: project_name.to_string(),
- bin: true
- }
- }
- H::Lib => {
- SourceFileInformation {
- relative_path: pp,
- target_name: project_name.to_string(),
- bin: false
- }
- }
+ H::Bin => SourceFileInformation {
+ relative_path: pp,
+ target_name: project_name.to_string(),
+ bin: true,
+ },
+ H::Lib => SourceFileInformation {
+ relative_path: pp,
+ target_name: project_name.to_string(),
+ bin: false,
+ },
H::Detect => {
let content = paths::read(&path.join(pp.clone()))?;
let isbin = content.contains("fn main");
SourceFileInformation {
relative_path: pp,
target_name: project_name.to_string(),
- bin: isbin
+ bin: isbin,
}
}
};
// Check for duplicate lib attempt
- let mut previous_lib_relpath : Option<&str> = None;
- let mut duplicates_checker : BTreeMap<&str, &SourceFileInformation> = BTreeMap::new();
+ let mut previous_lib_relpath: Option<&str> = None;
+ let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new();
for i in detected_files {
if i.bin {
if let Some(x) = BTreeMap::get::<str>(&duplicates_checker, i.target_name.as_ref()) {
- bail!("\
+ bail!(
+ "\
multiple possible binary sources found:
{}
{}
cannot automatically generate Cargo.toml as the main target would be ambiguous",
- &x.relative_path, &i.relative_path);
+ &x.relative_path,
+ &i.relative_path
+ );
}
duplicates_checker.insert(i.target_name.as_ref(), i);
} else {
if let Some(plp) = previous_lib_relpath {
- bail!("cannot have a project with \
- multiple libraries, \
- found both `{}` and `{}`",
- plp, i.relative_path)
+ bail!(
+ "cannot have a project with \
+ multiple libraries, \
+ found both `{}` and `{}`",
+ plp,
+ i.relative_path
+ )
}
previous_lib_relpath = Some(&i.relative_path);
}
fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformation {
if bin {
SourceFileInformation {
- relative_path: "src/main.rs".to_string(),
- target_name: project_name,
- bin: true,
+ relative_path: "src/main.rs".to_string(),
+ target_name: project_name,
+ bin: true,
}
} else {
SourceFileInformation {
- relative_path: "src/lib.rs".to_string(),
- target_name: project_name,
- bin: false,
+ relative_path: "src/lib.rs".to_string(),
+ target_name: project_name,
+ bin: false,
}
}
}
pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
let path = config.cwd().join(&opts.path);
if fs::metadata(&path).is_ok() {
- bail!("destination `{}` already exists\n\n\
- Use `cargo init` to initialize the directory\
- ", path.display()
+ bail!(
+ "destination `{}` already exists\n\n\
+ Use `cargo init` to initialize the directory\
+ ",
+ path.display()
)
}
};
mk(config, &mkopts).chain_err(|| {
- format_err!("Failed to create project `{}` at `{}`",
- name, path.display())
+ format_err!(
+ "Failed to create project `{}` at `{}`",
+ name,
+ path.display()
+ )
})?;
Ok(())
}
// if none exists, maybe create git, like in `cargo new`
if num_detected_vsces > 1 {
- bail!("more than one of .hg, .git, .pijul, .fossil configurations \
- found and the ignore file can't be filled in as \
- a result. specify --vcs to override detection");
+ bail!(
+ "more than one of .hg, .git, .pijul, .fossil configurations \
+ found and the ignore file can't be filled in as \
+ a result. specify --vcs to override detection"
+ );
}
}
version_control,
path: &path,
name,
- bin: src_paths_types.iter().any(|x|x.bin),
+ bin: src_paths_types.iter().any(|x| x.bin),
source_files: src_paths_types,
};
mk(config, &mkopts).chain_err(|| {
- format_err!("Failed to create project `{}` at `{}`",
- name, path.display())
+ format_err!(
+ "Failed to create project `{}` at `{}`",
+ name,
+ path.display()
+ )
})?;
Ok(())
}
let name = opts.name;
let cfg = global_config(config)?;
// Please ensure that ignore and hgignore are in sync.
- let ignore = ["\n", "/target\n", "**/*.rs.bk\n",
- if !opts.bin { "Cargo.lock\n" } else { "" }]
- .concat();
+ let ignore = [
+ "\n",
+ "/target\n",
+ "**/*.rs.bk\n",
+ if !opts.bin { "Cargo.lock\n" } else { "" },
+ ].concat();
// Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the
// file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for
// more.
- let hgignore = ["\n", "^target/\n", "glob:*.rs.bk\n",
- if !opts.bin { "glob:Cargo.lock\n" } else { "" }]
- .concat();
-
- let vcs = opts.version_control
- .unwrap_or_else(|| {
- let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path),
- config.cwd());
- match (cfg.version_control, in_existing_vcs) {
- (None, false) => VersionControl::Git,
- (Some(opt), false) => opt,
- (_, true) => VersionControl::NoVcs,
- }
- });
+ let hgignore = [
+ "\n",
+ "^target/\n",
+ "glob:*.rs.bk\n",
+ if !opts.bin { "glob:Cargo.lock\n" } else { "" },
+ ].concat();
+
+ let vcs = opts.version_control.unwrap_or_else(|| {
+ let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd());
+ match (cfg.version_control, in_existing_vcs) {
+ (None, false) => VersionControl::Git,
+ (Some(opt), false) => opt,
+ (_, true) => VersionControl::NoVcs,
+ }
+ });
match vcs {
VersionControl::Git => {
GitRepo::init(path, config.cwd())?;
}
paths::append(&path.join(".gitignore"), ignore.as_bytes())?;
- },
+ }
VersionControl::Hg => {
if !fs::metadata(&path.join(".hg")).is_ok() {
HgRepo::init(path, config.cwd())?;
}
paths::append(&path.join(".hgignore"), hgignore.as_bytes())?;
- },
+ }
VersionControl::Pijul => {
if !fs::metadata(&path.join(".pijul")).is_ok() {
PijulRepo::init(path, config.cwd())?;
}
paths::append(&path.join(".ignore"), ignore.as_bytes())?;
- },
+ }
VersionControl::Fossil => {
if !fs::metadata(&path.join(".fossil")).is_ok() {
FossilRepo::init(path, config.cwd())?;
}
- },
+ }
VersionControl::NoVcs => {
fs::create_dir_all(path)?;
- },
+ }
};
let (author_name, email) = discover_author()?;
// Hoo boy, sure glad we've got exhaustiveness checking behind us.
let author = match (cfg.name, cfg.email, author_name, email) {
- (Some(name), Some(email), _, _) |
- (Some(name), None, _, Some(email)) |
- (None, Some(email), name, _) |
- (None, None, name, Some(email)) => format!("{} <{}>", name, email),
- (Some(name), None, _, None) |
- (None, None, name, None) => name,
+ (Some(name), Some(email), _, _)
+ | (Some(name), None, _, Some(email))
+ | (None, Some(email), name, _)
+ | (None, None, name, Some(email)) => format!("{} <{}>", name, email),
+ (Some(name), None, _, None) | (None, None, name, None) => name,
};
let mut cargotoml_path_specifier = String::new();
for i in &opts.source_files {
if i.bin {
if i.relative_path != "src/main.rs" {
- cargotoml_path_specifier.push_str(&format!(r#"
+ cargotoml_path_specifier.push_str(&format!(
+ r#"
[[bin]]
name = "{}"
path = {}
-"#, i.target_name, toml::Value::String(i.relative_path.clone())));
+"#,
+ i.target_name,
+ toml::Value::String(i.relative_path.clone())
+ ));
}
} else if i.relative_path != "src/lib.rs" {
- cargotoml_path_specifier.push_str(&format!(r#"
+ cargotoml_path_specifier.push_str(&format!(
+ r#"
[lib]
name = "{}"
path = {}
-"#, i.target_name, toml::Value::String(i.relative_path.clone())));
+"#,
+ i.target_name,
+ toml::Value::String(i.relative_path.clone())
+ ));
}
}
// Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed
- paths::write(&path.join("Cargo.toml"), format!(
-r#"[package]
+ paths::write(
+ &path.join("Cargo.toml"),
+ format!(
+ r#"[package]
name = "{}"
version = "0.1.0"
authors = [{}]
[dependencies]
-{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())?;
-
+{}"#,
+ name,
+ toml::Value::String(author),
+ cargotoml_path_specifier
+ ).as_bytes(),
+ )?;
// Create all specified source files
// (with respective parent directories)
fs::create_dir_all(src_dir)?;
}
- let default_file_content : &[u8] = if i.bin {
+ let default_file_content: &[u8] = if i.bin {
b"\
fn main() {
println!(\"Hello, world!\");
"
};
- if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) {
+ if !fs::metadata(&path_of_source_file)
+ .map(|x| x.is_file())
+ .unwrap_or(false)
+ {
paths::write(&path_of_source_file, default_file_content)?;
}
}
if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
- let msg = format!("compiling this new crate may not work due to invalid \
- workspace configuration\n\n{}", e);
+ let msg = format!(
+ "compiling this new crate may not work due to invalid \
+ workspace configuration\n\n{}",
+ e
+ );
config.shell().warn(msg)?;
}
Ok(())
}
-fn get_environment_variable(variables: &[&str] ) -> Option<String>{
- variables.iter()
- .filter_map(|var| env::var(var).ok())
- .next()
+fn get_environment_variable(variables: &[&str]) -> Option<String> {
+ variables.iter().filter_map(|var| env::var(var).ok()).next()
}
fn discover_author() -> CargoResult<(String, Option<String>)> {
let cwd = env::current_dir()?;
let git_config = if let Ok(repo) = GitRepository::discover(&cwd) {
- repo.config().ok().or_else(|| GitConfig::open_default().ok())
+ repo.config()
+ .ok()
+ .or_else(|| GitConfig::open_default().ok())
} else {
GitConfig::open_default().ok()
};
let git_config = git_config.as_ref();
- let name_variables = ["CARGO_NAME", "GIT_AUTHOR_NAME", "GIT_COMMITTER_NAME",
- "USER", "USERNAME", "NAME"];
+ let name_variables = [
+ "CARGO_NAME",
+ "GIT_AUTHOR_NAME",
+ "GIT_COMMITTER_NAME",
+ "USER",
+ "USERNAME",
+ "NAME",
+ ];
let name = get_environment_variable(&name_variables[0..3])
- .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
- .or_else(|| get_environment_variable(&name_variables[3..]));
+ .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
+ .or_else(|| get_environment_variable(&name_variables[3..]));
let name = match name {
Some(name) => name,
None => {
- let username_var = if cfg!(windows) {"USERNAME"} else {"USER"};
- bail!("could not determine the current user, please set ${}",
- username_var)
+ let username_var = if cfg!(windows) { "USERNAME" } else { "USER" };
+ bail!(
+ "could not determine the current user, please set ${}",
+ username_var
+ )
}
};
- let email_variables = ["CARGO_EMAIL", "GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL",
- "EMAIL"];
+ let email_variables = [
+ "CARGO_EMAIL",
+ "GIT_AUTHOR_EMAIL",
+ "GIT_COMMITTER_EMAIL",
+ "EMAIL",
+ ];
let email = get_environment_variable(&email_variables[0..3])
- .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
- .or_else(|| get_environment_variable(&email_variables[3..]));
+ .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
+ .or_else(|| get_environment_variable(&email_variables[3..]));
let name = name.trim().to_string();
let email = email.map(|s| s.trim().to_string());
Some(("pijul", _)) => Some(VersionControl::Pijul),
Some(("none", _)) => Some(VersionControl::NoVcs),
Some((s, p)) => {
- return Err(internal(format!("invalid configuration for key \
- `cargo-new.vcs`, unknown vcs `{}` \
- (found in {})", s, p)))
+ return Err(internal(format!(
+ "invalid configuration for key \
+ `cargo-new.vcs`, unknown vcs `{}` \
+ (found in {})",
+ s, p
+ )))
}
- None => None
+ None => None,
};
Ok(CargoNewConfig {
name,
/// Loads the manifest, resolves the dependencies of the project to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout.
-pub fn output_metadata(ws: &Workspace,
- opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version != VERSION {
- bail!("metadata version {} not supported, only {} is currently supported",
- opt.version, VERSION);
+ bail!(
+ "metadata version {} not supported, only {} is currently supported",
+ opt.version,
+ VERSION
+ );
}
if opt.no_deps {
metadata_no_deps(ws, opt)
}
}
-fn metadata_no_deps(ws: &Workspace,
- _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
Ok(ExportInfo {
packages: ws.members().cloned().collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
})
}
-fn metadata_full(ws: &Workspace,
- opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
let specs = Packages::All.into_package_id_specs(ws)?;
- let deps = ops::resolve_ws_precisely(ws,
- None,
- &opt.features,
- opt.all_features,
- opt.no_default_features,
- &specs)?;
+ let deps = ops::resolve_ws_precisely(
+ ws,
+ None,
+ &opt.features,
+ opt.all_features,
+ opt.no_default_features,
+ &specs,
+ )?;
let (packages, resolve) = deps;
- let packages = packages.package_ids()
- .map(|i| packages.get(i).map(|p| p.clone()))
- .collect::<CargoResult<Vec<_>>>()?;
+ let packages = packages
+ .package_ids()
+ .map(|i| packages.get(i).map(|p| p.clone()))
+ .collect::<CargoResult<Vec<_>>>()?;
Ok(ExportInfo {
packages,
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
- resolve: Some(MetadataResolve{
+ resolve: Some(MetadataResolve {
resolve,
root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
}),
/// format for `PackageId`s
#[derive(Serialize)]
struct MetadataResolve {
- #[serde(rename = "nodes", serialize_with = "serialize_resolve")]
- resolve: Resolve,
+ #[serde(rename = "nodes", serialize_with = "serialize_resolve")] resolve: Resolve,
root: Option<PackageId>,
}
fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+where
+ S: ser::Serializer,
{
#[derive(Serialize)]
struct Node<'a> {
features: Vec<&'a str>,
}
- resolve.iter().map(|id| {
- Node {
+ resolve
+ .iter()
+ .map(|id| Node {
id,
dependencies: resolve.deps(id).collect(),
features: resolve.features_sorted(id),
- }
- }).collect::<Vec<_>>().serialize(s)
+ })
+ .collect::<Vec<_>>()
+ .serialize(s)
}
use std::sync::Arc;
use flate2::read::GzDecoder;
-use flate2::{GzBuilder, Compression};
+use flate2::{Compression, GzBuilder};
use git2;
-use tar::{Archive, Builder, Header, EntryType};
+use tar::{Archive, Builder, EntryType, Header};
-use core::{Package, Workspace, Source, SourceId};
+use core::{Package, Source, SourceId, Workspace};
use sources::PathSource;
use util::{self, internal, Config, FileLock};
use util::paths;
pub registry: Option<String>,
}
-pub fn package(ws: &Workspace,
- opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
+pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
ops::resolve_ws(ws)?;
let pkg = ws.current()?;
let config = ws.config();
- let mut src = PathSource::new(pkg.root(),
- pkg.package_id().source_id(),
- config);
+ let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
src.update()?;
if opts.check_metadata {
if opts.list {
let root = pkg.root();
- let mut list: Vec<_> = src.list_files(pkg)?.iter().map(|file| {
- util::without_prefix(file, root).unwrap().to_path_buf()
- }).collect();
+ let mut list: Vec<_> = src.list_files(pkg)?
+ .iter()
+ .map(|file| util::without_prefix(file, root).unwrap().to_path_buf())
+ .collect();
if include_lockfile(&pkg) {
list.push("Cargo.lock".into());
}
for file in list.iter() {
println!("{}", file.display());
}
- return Ok(None)
+ return Ok(None);
}
if !opts.allow_dirty {
// location if it actually passes all our tests. Any previously existing
// tarball can be assumed as corrupt or invalid, so we just blow it away if
// it exists.
- config.shell().status("Packaging", pkg.package_id().to_string())?;
+ config
+ .shell()
+ .status("Packaging", pkg.package_id().to_string())?;
dst.file().set_len(0)?;
- tar(ws, &src, dst.file(), &filename).chain_err(|| {
- format_err!("failed to prepare local package for uploading")
- })?;
+ tar(ws, &src, dst.file(), &filename)
+ .chain_err(|| format_err!("failed to prepare local package for uploading"))?;
if opts.verify {
dst.seek(SeekFrom::Start(0))?;
- run_verify(ws, &dst, opts).chain_err(|| {
- "failed to verify package tarball"
- })?
+ run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")?
}
dst.seek(SeekFrom::Start(0))?;
{
let src_path = dst.path();
let dst_path = dst.parent().join(&filename);
- fs::rename(&src_path, &dst_path).chain_err(|| {
- "failed to move temporary tarball into final location"
- })?;
+ fs::rename(&src_path, &dst_path)
+ .chain_err(|| "failed to move temporary tarball into final location")?;
}
Ok(Some(dst))
}
fn include_lockfile(pkg: &Package) -> bool {
- pkg.manifest().publish_lockfile() &&
- pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
+ pkg.manifest().publish_lockfile() && pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
}
// check that the package has some piece of metadata that a human can
)*
}}
}
- lacking!(description, license || license_file, documentation || homepage || repository);
+ lacking!(
+ description,
+ license || license_file,
+ documentation || homepage || repository
+ );
if !missing.is_empty() {
let mut things = missing[..missing.len() - 1].join(", ");
}
things.push_str(missing.last().unwrap());
- config.shell().warn(
- &format!("manifest has no {things}.\n\
- See http://doc.crates.io/manifest.html#package-metadata for more info.",
- things = things))?
+ config.shell().warn(&format!(
+ "manifest has no {things}.\n\
+ See http://doc.crates.io/manifest.html#package-metadata for more info.",
+ things = things
+ ))?
}
Ok(())
}
fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
for dep in pkg.dependencies() {
if dep.source_id().is_path() && !dep.specified_req() {
- bail!("all path dependencies must have a version specified \
- when packaging.\ndependency `{}` does not specify \
- a version.", dep.name())
+ bail!(
+ "all path dependencies must have a version specified \
+ when packaging.\ndependency `{}` does not specify \
+ a version.",
+ dep.name()
+ )
}
}
Ok(())
fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
if let Ok(repo) = git2::Repository::discover(p.root()) {
if let Some(workdir) = repo.workdir() {
- debug!("found a git repo at {:?}, checking if index present",
- workdir);
+ debug!(
+ "found a git repo at {:?}, checking if index present",
+ workdir
+ );
let path = p.manifest_path();
let path = path.strip_prefix(workdir).unwrap_or(path);
if let Ok(status) = repo.status_file(path) {
if (status & git2::Status::IGNORED).is_empty() {
debug!("Cargo.toml found in repo, checking if dirty");
- return git(p, src, &repo)
+ return git(p, src, &repo);
}
}
}
// have to assume that it's clean.
return Ok(());
- fn git(p: &Package,
- src: &PathSource,
- repo: &git2::Repository) -> CargoResult<()> {
+ fn git(p: &Package, src: &PathSource, repo: &git2::Repository) -> CargoResult<()> {
let workdir = repo.workdir().unwrap();
- let dirty = src.list_files(p)?.iter().filter(|file| {
- let relative = file.strip_prefix(workdir).unwrap();
- if let Ok(status) = repo.status_file(relative) {
- status != git2::Status::CURRENT
- } else {
- false
- }
- }).map(|path| {
- path.strip_prefix(p.root()).unwrap_or(path).display().to_string()
- }).collect::<Vec<_>>();
+ let dirty = src.list_files(p)?
+ .iter()
+ .filter(|file| {
+ let relative = file.strip_prefix(workdir).unwrap();
+ if let Ok(status) = repo.status_file(relative) {
+ status != git2::Status::CURRENT
+ } else {
+ false
+ }
+ })
+ .map(|path| {
+ path.strip_prefix(p.root())
+ .unwrap_or(path)
+ .display()
+ .to_string()
+ })
+ .collect::<Vec<_>>();
if dirty.is_empty() {
Ok(())
} else {
- bail!("{} files in the working directory contain changes that were \
- not yet committed into git:\n\n{}\n\n\
- to proceed despite this, pass the `--allow-dirty` flag",
- dirty.len(), dirty.join("\n"))
+ bail!(
+ "{} files in the working directory contain changes that were \
+ not yet committed into git:\n\n{}\n\n\
+ to proceed despite this, pass the `--allow-dirty` flag",
+ dirty.len(),
+ dirty.join("\n")
+ )
}
}
}
-fn tar(ws: &Workspace,
- src: &PathSource,
- dst: &File,
- filename: &str) -> CargoResult<()> {
+fn tar(ws: &Workspace, src: &PathSource, dst: &File, filename: &str) -> CargoResult<()> {
// Prepare the encoder and its header
let filename = Path::new(filename);
- let encoder = GzBuilder::new().filename(util::path2bytes(filename)?)
- .write(dst, Compression::best());
+ let encoder = GzBuilder::new()
+ .filename(util::path2bytes(filename)?)
+ .write(dst, Compression::best());
// Put all package files into a compressed archive
let mut ar = Builder::new(encoder);
let relative = util::without_prefix(file, root).unwrap();
check_filename(relative)?;
let relative = relative.to_str().ok_or_else(|| {
- format_err!("non-utf8 path in source directory: {}",
- relative.display())
+ format_err!("non-utf8 path in source directory: {}", relative.display())
})?;
- config.shell().verbose(|shell| {
- shell.status("Archiving", &relative)
- })?;
- let path = format!("{}-{}{}{}", pkg.name(), pkg.version(),
- path::MAIN_SEPARATOR, relative);
+ config
+ .shell()
+ .verbose(|shell| shell.status("Archiving", &relative))?;
+ let path = format!(
+ "{}-{}{}{}",
+ pkg.name(),
+ pkg.version(),
+ path::MAIN_SEPARATOR,
+ relative
+ );
// The tar::Builder type by default will build GNU archives, but
// unfortunately we force it here to use UStar archives instead. The
// unpack the selectors 0.4.0 crate on crates.io. Either that or take a
// look at rust-lang/cargo#2326
let mut header = Header::new_ustar();
- header.set_path(&path).chain_err(|| {
- format!("failed to add to archive: `{}`", relative)
- })?;
- let mut file = File::open(file).chain_err(|| {
- format!("failed to open for archiving: `{}`", file.display())
- })?;
- let metadata = file.metadata().chain_err(|| {
- format!("could not learn metadata for: `{}`", relative)
- })?;
+ header
+ .set_path(&path)
+ .chain_err(|| format!("failed to add to archive: `{}`", relative))?;
+ let mut file = File::open(file)
+ .chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?;
+ let metadata = file.metadata()
+ .chain_err(|| format!("could not learn metadata for: `{}`", relative))?;
header.set_metadata(&metadata);
if relative == "Cargo.toml" {
let orig = Path::new(&path).with_file_name("Cargo.toml.orig");
header.set_path(&orig)?;
header.set_cksum();
- ar.append(&header, &mut file).chain_err(|| {
- internal(format!("could not archive source file `{}`", relative))
- })?;
+ ar.append(&header, &mut file)
+ .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
let mut header = Header::new_ustar();
let toml = pkg.to_registry_toml(ws.config())?;
header.set_mode(0o644);
header.set_size(toml.len() as u64);
header.set_cksum();
- ar.append(&header, toml.as_bytes()).chain_err(|| {
- internal(format!("could not archive source file `{}`", relative))
- })?;
+ ar.append(&header, toml.as_bytes())
+ .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
} else {
header.set_cksum();
- ar.append(&header, &mut file).chain_err(|| {
- internal(format!("could not archive source file `{}`", relative))
- })?;
+ ar.append(&header, &mut file)
+ .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
}
}
if include_lockfile(pkg) {
let toml = paths::read(&ws.root().join("Cargo.lock"))?;
- let path = format!("{}-{}{}Cargo.lock", pkg.name(), pkg.version(),
- path::MAIN_SEPARATOR);
+ let path = format!(
+ "{}-{}{}Cargo.lock",
+ pkg.name(),
+ pkg.version(),
+ path::MAIN_SEPARATOR
+ );
let mut header = Header::new_ustar();
header.set_path(&path)?;
header.set_entry_type(EntryType::file());
header.set_mode(0o644);
header.set_size(toml.len() as u64);
header.set_cksum();
- ar.append(&header, toml.as_bytes()).chain_err(|| {
- internal("could not archive source file `Cargo.lock`")
- })?;
+ ar.append(&header, toml.as_bytes())
+ .chain_err(|| internal("could not archive source file `Cargo.lock`"))?;
}
let encoder = ar.into_inner()?;
config.shell().status("Verifying", pkg)?;
let f = GzDecoder::new(tar.file());
- let dst = tar.parent().join(&format!("{}-{}", pkg.name(), pkg.version()));
+ let dst = tar.parent()
+ .join(&format!("{}-{}", pkg.name(), pkg.version()));
if dst.exists() {
paths::remove_dir_all(&dst)?;
}
let new_pkg = src.root_package()?;
let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
- ops::compile_ws(&ws, None, &ops::CompileOptions {
- config,
- jobs: opts.jobs,
- target: opts.target.clone(),
- features: Vec::new(),
- no_default_features: false,
- all_features: false,
- spec: ops::Packages::Packages(Vec::new()),
- filter: ops::CompileFilter::Default { required_features_filterable: true },
- release: false,
- message_format: ops::MessageFormat::Human,
- mode: ops::CompileMode::Build,
- target_rustdoc_args: None,
- target_rustc_args: None,
- }, Arc::new(DefaultExecutor))?;
+ ops::compile_ws(
+ &ws,
+ None,
+ &ops::CompileOptions {
+ config,
+ jobs: opts.jobs,
+ target: opts.target.clone(),
+ features: Vec::new(),
+ no_default_features: false,
+ all_features: false,
+ spec: ops::Packages::Packages(Vec::new()),
+ filter: ops::CompileFilter::Default {
+ required_features_filterable: true,
+ },
+ release: false,
+ message_format: ops::MessageFormat::Human,
+ mode: ops::CompileMode::Build,
+ target_rustdoc_args: None,
+ target_rustc_args: None,
+ },
+ Arc::new(DefaultExecutor),
+ )?;
Ok(())
}
};
let name = match name.to_str() {
Some(name) => name,
- None => {
- bail!("path does not have a unicode filename which may not unpack \
- on all platforms: {}", file.display())
- }
+ None => bail!(
+ "path does not have a unicode filename which may not unpack \
+ on all platforms: {}",
+ file.display()
+ ),
};
let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
- bail!("cannot package a filename with a special character `{}`: {}",
- c, file.display())
+ bail!(
+ "cannot package a filename with a special character `{}`: {}",
+ c,
+ file.display()
+ )
}
Ok(())
}
use std::io;
use std::path::{Path, PathBuf};
-use core::{Package, SourceId, PackageId, EitherManifest};
+use core::{EitherManifest, Package, PackageId, SourceId};
use util::{self, Config};
-use util::errors::{CargoResult, CargoError};
+use util::errors::{CargoError, CargoResult};
use util::important_paths::find_project_manifest_exact;
use util::toml::read_manifest;
-pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
- -> CargoResult<(Package, Vec<PathBuf>)> {
- trace!("read_package; path={}; source-id={}", path.display(), source_id);
+pub fn read_package(
+ path: &Path,
+ source_id: &SourceId,
+ config: &Config,
+) -> CargoResult<(Package, Vec<PathBuf>)> {
+ trace!(
+ "read_package; path={}; source-id={}",
+ path.display(),
+ source_id
+ );
let (manifest, nested) = read_manifest(path, source_id, config)?;
let manifest = match manifest {
EitherManifest::Real(manifest) => manifest,
- EitherManifest::Virtual(..) => {
- bail!("found a virtual manifest at `{}` instead of a package \
- manifest", path.display())
- }
+ EitherManifest::Virtual(..) => bail!(
+ "found a virtual manifest at `{}` instead of a package \
+ manifest",
+ path.display()
+ ),
};
Ok((Package::new(manifest, path), nested))
}
-pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
- -> CargoResult<Vec<Package>> {
+pub fn read_packages(
+ path: &Path,
+ source_id: &SourceId,
+ config: &Config,
+) -> CargoResult<Vec<Package>> {
let mut all_packages = HashMap::new();
let mut visited = HashSet::<PathBuf>::new();
let mut errors = Vec::<CargoError>::new();
- trace!("looking for root package: {}, source_id={}", path.display(), source_id);
+ trace!(
+ "looking for root package: {}, source_id={}",
+ path.display(),
+ source_id
+ );
walk(path, &mut |dir| {
trace!("looking for child package: {}", dir.display());
if dir != path {
let name = dir.file_name().and_then(|s| s.to_str());
if name.map(|s| s.starts_with('.')) == Some(true) {
- return Ok(false)
+ return Ok(false);
}
// Don't automatically discover packages across git submodules
if fs::metadata(&dir.join(".git")).is_ok() {
- return Ok(false)
+ return Ok(false);
}
}
// Don't ever look at target directories
- if dir.file_name().and_then(|s| s.to_str()) == Some("target") &&
- has_manifest(dir.parent().unwrap()) {
- return Ok(false)
+ if dir.file_name().and_then(|s| s.to_str()) == Some("target")
+ && has_manifest(dir.parent().unwrap())
+ {
+ return Ok(false);
}
if has_manifest(dir) {
- read_nested_packages(dir, &mut all_packages, source_id, config,
- &mut visited, &mut errors)?;
+ read_nested_packages(
+ dir,
+ &mut all_packages,
+ source_id,
+ config,
+ &mut visited,
+ &mut errors,
+ )?;
}
Ok(true)
})?;
if all_packages.is_empty() {
match errors.pop() {
Some(err) => Err(err),
- None => Err(format_err!("Could not find Cargo.toml in `{}`", path.display())),
+ None => Err(format_err!(
+ "Could not find Cargo.toml in `{}`",
+ path.display()
+ )),
}
} else {
Ok(all_packages.into_iter().map(|(_, v)| v).collect())
}
}
-fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>)
- -> CargoResult<()> {
+fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {
if !callback(path)? {
trace!("not processing {}", path.display());
- return Ok(())
+ return Ok(());
}
// Ignore any permission denied errors because temporary directories
// can often have some weird permissions on them.
let dirs = match fs::read_dir(path) {
Ok(dirs) => dirs,
- Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {
- return Ok(())
- }
+ Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),
Err(e) => {
let cx = format!("failed to read directory `{}`", path.display());
let e = CargoError::from(e);
- return Err(e.context(cx).into())
+ return Err(e.context(cx).into());
}
};
for dir in dirs {
find_project_manifest_exact(path, "Cargo.toml").is_ok()
}
-fn read_nested_packages(path: &Path,
- all_packages: &mut HashMap<PackageId, Package>,
- source_id: &SourceId,
- config: &Config,
- visited: &mut HashSet<PathBuf>,
- errors: &mut Vec<CargoError>) -> CargoResult<()> {
- if !visited.insert(path.to_path_buf()) { return Ok(()) }
+fn read_nested_packages(
+ path: &Path,
+ all_packages: &mut HashMap<PackageId, Package>,
+ source_id: &SourceId,
+ config: &Config,
+ visited: &mut HashSet<PathBuf>,
+ errors: &mut Vec<CargoError>,
+) -> CargoResult<()> {
+ if !visited.insert(path.to_path_buf()) {
+ return Ok(());
+ }
let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
// it's safer to ignore malformed manifests to avoid
//
// TODO: Add a way to exclude folders?
- info!("skipping malformed package found at `{}`",
- path.to_string_lossy());
+ info!(
+ "skipping malformed package found at `{}`",
+ path.to_string_lossy()
+ );
errors.push(err);
return Ok(());
}
- Ok(tuple) => tuple
+ Ok(tuple) => tuple,
};
let manifest = match manifest {
let pkg = Package::new(manifest, &manifest_path);
let pkg_id = pkg.package_id().clone();
- use ::std::collections::hash_map::Entry;
+ use std::collections::hash_map::Entry;
match all_packages.entry(pkg_id) {
- Entry::Vacant(v) => { v.insert(pkg); },
+ Entry::Vacant(v) => {
+ v.insert(pkg);
+ }
Entry::Occupied(_) => {
- info!("skipping nested package `{}` found at `{}`",
- pkg.name(), path.to_string_lossy());
+ info!(
+ "skipping nested package `{}` found at `{}`",
+ pkg.name(),
+ path.to_string_lossy()
+ );
}
}
if !source_id.is_registry() {
for p in nested.iter() {
let path = util::normalize_path(&path.join(p));
- read_nested_packages(&path, all_packages, source_id,
- config, visited, errors)?;
+ read_nested_packages(&path, all_packages, source_id, config, visited, errors)?;
}
}
use util::{self, CargoResult, ProcessError};
use core::Workspace;
-pub fn run(ws: &Workspace,
- options: &ops::CompileOptions,
- args: &[String]) -> CargoResult<Option<ProcessError>> {
+pub fn run(
+ ws: &Workspace,
+ options: &ops::CompileOptions,
+ args: &[String],
+) -> CargoResult<Option<ProcessError>> {
let config = ws.config();
let pkg = match options.spec {
- Packages::All |
- Packages::Default |
- Packages::OptOut(_) => unreachable!("cargo run supports single package only"),
+ Packages::All | Packages::Default | Packages::OptOut(_) => {
+ unreachable!("cargo run supports single package only")
+ }
Packages::Packages(ref xs) => match xs.len() {
0 => ws.current()?,
1 => ws.members()
.find(|pkg| &*pkg.name() == xs[0])
- .ok_or_else(||
+ .ok_or_else(|| {
format_err!("package `{}` is not a member of the workspace", xs[0])
- )?,
+ })?,
_ => unreachable!("cargo run supports single package only"),
- }
+ },
};
- let bins: Vec<_> = pkg.manifest().targets().iter().filter(|a| {
- !a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() {
- a.is_bin()
- } else {
- options.filter.matches(a)
- }
- })
- .map(|bin| bin.name())
- .collect();
+ let bins: Vec<_> = pkg.manifest()
+ .targets()
+ .iter()
+ .filter(|a| {
+ !a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() {
+ a.is_bin()
+ } else {
+ options.filter.matches(a)
+ }
+ })
+ .map(|bin| bin.name())
+ .collect();
if bins.is_empty() {
if !options.filter.is_specific() {
}
if bins.len() > 1 {
if !options.filter.is_specific() {
- bail!("`cargo run` requires that a project only have one \
- executable; use the `--bin` option to specify which one \
- to run\navailable binaries: {}", bins.join(", "))
+ bail!(
+ "`cargo run` requires that a project only have one \
+ executable; use the `--bin` option to specify which one \
+ to run\navailable binaries: {}",
+ bins.join(", ")
+ )
} else {
- bail!("`cargo run` can run at most one executable, but \
- multiple were specified")
+ bail!(
+ "`cargo run` can run at most one executable, but \
+ multiple were specified"
+ )
}
}
assert_eq!(compile.binaries.len(), 1);
let exe = &compile.binaries[0];
let exe = match util::without_prefix(exe, config.cwd()) {
- Some(path) if path.file_name() == Some(path.as_os_str())
- => Path::new(".").join(path).to_path_buf(),
+ Some(path) if path.file_name() == Some(path.as_os_str()) => {
+ Path::new(".").join(path).to_path_buf()
+ }
Some(path) => path.to_path_buf(),
None => exe.to_path_buf(),
};
-use std::collections::{HashMap, HashSet, BTreeSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
use std::ffi::OsStr;
use std::path::PathBuf;
use semver::Version;
use lazycell::LazyCell;
-use core::{PackageId, Package, Target, TargetKind};
-use util::{self, CargoResult, Config, ProcessBuilder, process, join_paths};
+use core::{Package, PackageId, Target, TargetKind};
+use util::{self, join_paths, process, CargoResult, Config, ProcessBuilder};
/// A structure returning the result of a compilation.
pub struct Compilation<'cfg> {
pub fn new(config: &'cfg Config) -> Compilation<'cfg> {
Compilation {
libraries: HashMap::new(),
- native_dirs: BTreeSet::new(), // TODO: deprecated, remove
+ native_dirs: BTreeSet::new(), // TODO: deprecated, remove
root_output: PathBuf::from("/"),
deps_output: PathBuf::from("/"),
host_deps_output: PathBuf::from("/"),
}
/// See `process`.
- pub fn host_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package)
- -> CargoResult<ProcessBuilder> {
+ pub fn host_process<T: AsRef<OsStr>>(
+ &self,
+ cmd: T,
+ pkg: &Package,
+ ) -> CargoResult<ProcessBuilder> {
self.fill_env(process(cmd), pkg, true)
}
}
/// See `process`.
- pub fn target_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package)
- -> CargoResult<ProcessBuilder> {
+ pub fn target_process<T: AsRef<OsStr>>(
+ &self,
+ cmd: T,
+ pkg: &Package,
+ ) -> CargoResult<ProcessBuilder> {
let builder = if let Some((ref runner, ref args)) = *self.target_runner()? {
let mut builder = process(runner);
builder.args(args);
///
/// The package argument is also used to configure environment variables as
/// well as the working directory of the child process.
- fn fill_env(&self, mut cmd: ProcessBuilder, pkg: &Package, is_host: bool)
- -> CargoResult<ProcessBuilder> {
-
+ fn fill_env(
+ &self,
+ mut cmd: ProcessBuilder,
+ pkg: &Package,
+ is_host: bool,
+ ) -> CargoResult<ProcessBuilder> {
let mut search_path = if is_host {
let mut search_path = vec![self.host_deps_output.clone()];
search_path.extend(self.host_dylib_path.clone());
search_path
} else {
let mut search_path =
- super::filter_dynamic_search_path(self.native_dirs.iter(),
- &self.root_output);
+ super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output);
search_path.push(self.root_output.clone());
search_path.push(self.deps_output.clone());
search_path.extend(self.target_dylib_path.clone());
// consider adding the corresponding properties to the hash
// in Context::target_metadata()
cmd.env("CARGO_MANIFEST_DIR", pkg.root())
- .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
- .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
- .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
- .env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version()))
- .env("CARGO_PKG_VERSION", &pkg.version().to_string())
- .env("CARGO_PKG_NAME", &*pkg.name())
- .env("CARGO_PKG_DESCRIPTION", metadata.description.as_ref().unwrap_or(&String::new()))
- .env("CARGO_PKG_HOMEPAGE", metadata.homepage.as_ref().unwrap_or(&String::new()))
- .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
- .cwd(pkg.root());
+ .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
+ .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
+ .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
+ .env(
+ "CARGO_PKG_VERSION_PRE",
+ &pre_version_component(pkg.version()),
+ )
+ .env("CARGO_PKG_VERSION", &pkg.version().to_string())
+ .env("CARGO_PKG_NAME", &*pkg.name())
+ .env(
+ "CARGO_PKG_DESCRIPTION",
+ metadata.description.as_ref().unwrap_or(&String::new()),
+ )
+ .env(
+ "CARGO_PKG_HOMEPAGE",
+ metadata.homepage.as_ref().unwrap_or(&String::new()),
+ )
+ .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
+ .cwd(pkg.root());
Ok(cmd)
}
}
let mut ret = String::new();
for (i, x) in v.pre.iter().enumerate() {
- if i != 0 { ret.push('.') };
+ if i != 0 {
+ ret.push('.')
+ };
ret.push_str(&x.to_string());
}
#![allow(deprecated)]
-use std::collections::{HashSet, HashMap, BTreeSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::hash_map::Entry;
use std::env;
use std::fmt;
-use std::hash::{Hasher, Hash, SipHasher};
+use std::hash::{Hash, Hasher, SipHasher};
use std::path::{Path, PathBuf};
use std::str::{self, FromStr};
use std::sync::Arc;
use jobserver::Client;
-use core::{Package, PackageId, PackageSet, Resolve, Target, Profile};
-use core::{TargetKind, Profiles, Dependency, Workspace};
+use core::{Package, PackageId, PackageSet, Profile, Resolve, Target};
+use core::{Dependency, Profiles, TargetKind, Workspace};
use core::dependency::Kind as DepKind;
-use util::{self, ProcessBuilder, internal, Config, profile, Cfg, CfgExpr};
+use util::{self, internal, profile, Cfg, CfgExpr, Config, ProcessBuilder};
use util::errors::{CargoResult, CargoResultExt};
use super::TargetConfig;
-use super::custom_build::{BuildState, BuildScripts, BuildDeps};
+use super::custom_build::{BuildDeps, BuildScripts, BuildState};
use super::fingerprint::Fingerprint;
use super::layout::Layout;
use super::links::Links;
-use super::{Kind, Compilation, BuildConfig};
+use super::{BuildConfig, Compilation, Kind};
/// All information needed to define a Unit.
///
process.arg("--crate-type").arg(crate_type);
let output = process.exec_with_output().chain_err(|| {
- format!("failed to run `rustc` to learn about \
- crate-type {} information", crate_type)
+ format!(
+ "failed to run `rustc` to learn about \
+ crate-type {} information",
+ crate_type
+ )
})?;
let error = str::from_utf8(&output.stderr).unwrap();
pub struct Metadata(u64);
impl<'a, 'cfg> Context<'a, 'cfg> {
- pub fn new(ws: &'a Workspace<'cfg>,
- resolve: &'a Resolve,
- packages: &'a PackageSet<'cfg>,
- config: &'cfg Config,
- build_config: BuildConfig,
- profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> {
-
- let dest = if build_config.release { "release" } else { "debug" };
+ pub fn new(
+ ws: &'a Workspace<'cfg>,
+ resolve: &'a Resolve,
+ packages: &'a PackageSet<'cfg>,
+ config: &'cfg Config,
+ build_config: BuildConfig,
+ profiles: &'a Profiles,
+ ) -> CargoResult<Context<'a, 'cfg>> {
+ let dest = if build_config.release {
+ "release"
+ } else {
+ "debug"
+ };
let host_layout = Layout::new(ws, None, dest)?;
let target_layout = match build_config.requested_target.as_ref() {
Some(target) => Some(Layout::new(ws, Some(target), dest)?),
// is ourself, a running process.
let jobserver = match config.jobserver_from_env() {
Some(c) => c.clone(),
- None => Client::new(build_config.jobs as usize - 1).chain_err(|| {
- "failed to create jobserver"
- })?,
+ None => Client::new(build_config.jobs as usize - 1)
+ .chain_err(|| "failed to create jobserver")?,
};
Ok(Context {
pub fn prepare(&mut self) -> CargoResult<()> {
let _p = profile::start("preparing layout");
- self.host.prepare().chain_err(|| {
- internal("couldn't prepare build directories")
- })?;
+ self.host
+ .prepare()
+ .chain_err(|| internal("couldn't prepare build directories"))?;
if let Some(ref mut target) = self.target {
- target.prepare().chain_err(|| {
- internal("couldn't prepare build directories")
- })?;
+ target
+ .prepare()
+ .chain_err(|| internal("couldn't prepare build directories"))?;
}
self.compilation.host_deps_output = self.host.deps().to_path_buf();
/// for this unit and its dependencies.
///
/// Tracks visited units to avoid unnecessary work.
- fn visit_crate_type(&self,
- unit: &Unit<'a>,
- crate_types: &mut BTreeSet<String>,
- visited_units: &mut HashSet<Unit<'a>>)
- -> CargoResult<()> {
+ fn visit_crate_type(
+ &self,
+ unit: &Unit<'a>,
+ crate_types: &mut BTreeSet<String>,
+ visited_units: &mut HashSet<Unit<'a>>,
+ ) -> CargoResult<()> {
if !visited_units.insert(*unit) {
return Ok(());
}
Ok(())
}
- fn probe_target_info_kind(&mut self,
- crate_types: &BTreeSet<String>,
- kind: Kind)
- -> CargoResult<()> {
- let rustflags = env_args(self.config,
- &self.build_config,
- self.info(&kind),
- kind,
- "RUSTFLAGS")?;
+ fn probe_target_info_kind(
+ &mut self,
+ crate_types: &BTreeSet<String>,
+ kind: Kind,
+ ) -> CargoResult<()> {
+ let rustflags = env_args(
+ self.config,
+ &self.build_config,
+ self.info(&kind),
+ kind,
+ "RUSTFLAGS",
+ )?;
let mut process = self.config.rustc()?.process();
- process.arg("-")
- .arg("--crate-name").arg("___")
- .arg("--print=file-names")
- .args(&rustflags)
- .env_remove("RUST_LOG");
+ process
+ .arg("-")
+ .arg("--crate-name")
+ .arg("___")
+ .arg("--print=file-names")
+ .args(&rustflags)
+ .env_remove("RUST_LOG");
if kind == Kind::Target {
process.arg("--target").arg(&self.target_triple());
with_cfg.arg("--print=cfg");
let mut has_cfg_and_sysroot = true;
- let output = with_cfg.exec_with_output().or_else(|_| {
- has_cfg_and_sysroot = false;
- process.exec_with_output()
- }).chain_err(|| {
- "failed to run `rustc` to learn about target-specific information"
- })?;
+ let output = with_cfg
+ .exec_with_output()
+ .or_else(|_| {
+ has_cfg_and_sysroot = false;
+ process.exec_with_output()
+ })
+ .chain_err(|| "failed to run `rustc` to learn about target-specific information")?;
let error = str::from_utf8(&output.stderr).unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
if has_cfg_and_sysroot {
let line = match lines.next() {
Some(line) => line,
- None => bail!("output of --print=sysroot missing when learning about \
- target-specific information from rustc"),
+ None => bail!(
+ "output of --print=sysroot missing when learning about \
+ target-specific information from rustc"
+ ),
};
let mut rustlib = PathBuf::from(line);
if kind == Kind::Host {
///
/// This will recursively walk `units` and all of their dependencies to
/// determine which crate are going to be used in plugins or not.
- pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>])
- -> CargoResult<()> {
+ pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>]) -> CargoResult<()> {
let mut visited = HashSet::new();
for unit in units {
- self.walk_used_in_plugin_map(unit,
- unit.target.for_host(),
- &mut visited)?;
+ self.walk_used_in_plugin_map(unit, unit.target.for_host(), &mut visited)?;
}
Ok(())
}
- fn walk_used_in_plugin_map(&mut self,
- unit: &Unit<'a>,
- is_plugin: bool,
- visited: &mut HashSet<(Unit<'a>, bool)>)
- -> CargoResult<()> {
+ fn walk_used_in_plugin_map(
+ &mut self,
+ unit: &Unit<'a>,
+ is_plugin: bool,
+ visited: &mut HashSet<(Unit<'a>, bool)>,
+ ) -> CargoResult<()> {
if !visited.insert((*unit, is_plugin)) {
- return Ok(())
+ return Ok(());
}
if is_plugin {
self.used_in_plugin.insert(*unit);
}
for unit in self.dep_targets(unit)? {
- self.walk_used_in_plugin_map(&unit,
- is_plugin || unit.target.for_host(),
- visited)?;
+ self.walk_used_in_plugin_map(&unit, is_plugin || unit.target.for_host(), visited)?;
}
Ok(())
}
fn layout(&self, kind: Kind) -> &Layout {
match kind {
Kind::Host => &self.host,
- Kind::Target => self.target.as_ref().unwrap_or(&self.host)
+ Kind::Target => self.target.as_ref().unwrap_or(&self.host),
}
}
/// Return the target triple which this context is targeting.
pub fn target_triple(&self) -> &str {
- self.requested_target().unwrap_or_else(|| self.host_triple())
+ self.requested_target()
+ .unwrap_or_else(|| self.host_triple())
}
/// Requested (not actual) target for the build
/// like `target/debug/libfoo.{a,so,rlib}` and such.
pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
if let Some(cache) = self.target_metadatas.get(unit) {
- return cache.clone()
+ return cache.clone();
}
let metadata = self.calc_target_metadata(unit);
// just here for rustbuild. We need a more principled method
// doing this eventually.
let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
- if !(unit.profile.test || unit.profile.check) &&
- (unit.target.is_dylib() || unit.target.is_cdylib() ||
- (unit.target.is_bin() && self.target_triple().starts_with("wasm32-"))) &&
- unit.pkg.package_id().source_id().is_path() &&
- !__cargo_default_lib_metadata.is_ok()
+ if !(unit.profile.test || unit.profile.check)
+ && (unit.target.is_dylib() || unit.target.is_cdylib()
+ || (unit.target.is_bin() && self.target_triple().starts_with("wasm32-")))
+ && unit.pkg.package_id().source_id().is_path()
+ && !__cargo_default_lib_metadata.is_ok()
{
return None;
}
// Unique metadata per (name, source, version) triple. This'll allow us
// to pull crates from anywhere w/o worrying about conflicts
- unit.pkg.package_id().stable_hash(self.ws.root()).hash(&mut hasher);
+ unit.pkg
+ .package_id()
+ .stable_hash(self.ws.root())
+ .hash(&mut hasher);
// Add package properties which map to environment variables
// exposed by Cargo
// Also mix in enabled features to our metadata. This'll ensure that
// when changing feature sets each lib is separately cached.
- self.resolve.features_sorted(unit.pkg.package_id()).hash(&mut hasher);
+ self.resolve
+ .features_sorted(unit.pkg.package_id())
+ .hash(&mut hasher);
// Mix in the target-metadata of all the dependencies of this target
if let Ok(deps) = self.dep_targets(unit) {
- let mut deps_metadata = deps.into_iter().map(|dep_unit| {
- self.target_metadata(&dep_unit)
- }).collect::<Vec<_>>();
+ let mut deps_metadata = deps.into_iter()
+ .map(|dep_unit| self.target_metadata(&dep_unit))
+ .collect::<Vec<_>>();
deps_metadata.sort();
deps_metadata.hash(&mut hasher);
}
/// Returns the file stem for a given target/profile combo (with metadata)
pub fn file_stem(&mut self, unit: &Unit<'a>) -> String {
match self.target_metadata(unit) {
- Some(ref metadata) => format!("{}-{}", unit.target.crate_name(),
- metadata),
+ Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata),
None => self.bin_stem(unit),
}
}
// we don't want to link it up.
if src_dir.ends_with("deps") {
// Don't lift up library dependencies
- if self.ws.members().find(|&p| p == unit.pkg).is_none() &&
- !unit.target.is_bin() {
+ if self.ws.members().find(|&p| p == unit.pkg).is_none() && !unit.target.is_bin() {
None
} else {
Some((
src_dir.parent().unwrap().to_owned(),
- if unit.profile.test {file_stem} else {bin_stem},
+ if unit.profile.test {
+ file_stem
+ } else {
+ bin_stem
+ },
))
}
} else if bin_stem == file_stem {
None
- } else if src_dir.ends_with("examples")
- || src_dir.parent().unwrap().ends_with("build") {
+ } else if src_dir.ends_with("examples") || src_dir.parent().unwrap().ends_with("build") {
Some((src_dir, bin_stem))
} else {
None
/// - filename: filename rustc compiles to. (Often has metadata suffix).
/// - link_dst: Optional file to link/copy the result to (without metadata suffix)
/// - linkable: Whether possible to link against file (eg it's a library)
- pub fn target_filenames(&mut self, unit: &Unit<'a>)
- -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
+ pub fn target_filenames(
+ &mut self,
+ unit: &Unit<'a>,
+ ) -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
if let Some(cache) = self.target_filenames.get(unit) {
- return Ok(Arc::clone(cache))
+ return Ok(Arc::clone(cache));
}
let result = self.calc_target_filenames(unit);
result
}
- fn calc_target_filenames(&mut self, unit: &Unit<'a>)
- -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
+ fn calc_target_filenames(
+ &mut self,
+ unit: &Unit<'a>,
+ ) -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
let out_dir = self.out_dir(unit);
let stem = self.file_stem(unit);
let link_stem = self.link_stem(unit);
{
if unit.profile.check {
let filename = out_dir.join(format!("lib{}.rmeta", stem));
- let link_dst = link_stem.clone().map(|(ld, ls)| {
- ld.join(format!("lib{}.rmeta", ls))
- });
+ let link_dst = link_stem
+ .clone()
+ .map(|(ld, ls)| ld.join(format!("lib{}.rmeta", ls)));
ret.push((filename, link_dst, TargetFileType::Linkable));
} else {
let mut add = |crate_type: &str, file_type: TargetFileType| -> CargoResult<()> {
- let crate_type = if crate_type == "lib" {"rlib"} else {crate_type};
+ let crate_type = if crate_type == "lib" {
+ "rlib"
+ } else {
+ crate_type
+ };
let mut crate_types = info.crate_types.borrow_mut();
let entry = crate_types.entry(crate_type.to_string());
let crate_type_info = match entry {
// "-" and "_". should_replace_hyphens is a flag to indicate that
// we need to convert the stem "web-stuff" to "web_stuff", so we
// won't miss "web_stuff.wasm".
- let conv = |s: String| if should_replace_hyphens {
- s.replace("-", "_")
- } else {
- s
+ let conv = |s: String| {
+ if should_replace_hyphens {
+ s.replace("-", "_")
+ } else {
+ s
+ }
};
- let filename =
- out_dir.join(format!("{}{}{}", prefix, conv(stem.clone()), suffix));
+ let filename = out_dir.join(format!(
+ "{}{}{}",
+ prefix,
+ conv(stem.clone()),
+ suffix
+ ));
let link_dst = link_stem.clone().map(|(ld, ls)| {
ld.join(format!("{}{}{}", prefix, conv(ls), suffix))
});
};
//info!("{:?}", unit);
match *unit.target.kind() {
- TargetKind::Bin |
- TargetKind::CustomBuild |
- TargetKind::ExampleBin |
- TargetKind::Bench |
- TargetKind::Test => {
+ TargetKind::Bin
+ | TargetKind::CustomBuild
+ | TargetKind::ExampleBin
+ | TargetKind::Bench
+ | TargetKind::Test => {
add("bin", TargetFileType::Normal)?;
}
- TargetKind::Lib(..) |
- TargetKind::ExampleLib(..)
- if unit.profile.test => {
+ TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.profile.test => {
add("bin", TargetFileType::Normal)?;
}
- TargetKind::ExampleLib(ref kinds) |
- TargetKind::Lib(ref kinds) => {
+ TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => {
for kind in kinds {
- add(kind.crate_type(), if kind.linkable() {
- TargetFileType::Linkable
- } else {
- TargetFileType::Normal
- })?;
+ add(
+ kind.crate_type(),
+ if kind.linkable() {
+ TargetFileType::Linkable
+ } else {
+ TargetFileType::Normal
+ },
+ )?;
}
}
}
}
if ret.is_empty() {
if !unsupported.is_empty() {
- bail!("cannot produce {} for `{}` as the target `{}` \
- does not support these crate types",
- unsupported.join(", "), unit.pkg, self.target_triple())
+ bail!(
+ "cannot produce {} for `{}` as the target `{}` \
+ does not support these crate types",
+ unsupported.join(", "),
+ unit.pkg,
+ self.target_triple()
+ )
}
- bail!("cannot compile `{}` as the target `{}` does not \
- support any of the output crate types",
- unit.pkg, self.target_triple());
+ bail!(
+ "cannot compile `{}` as the target `{}` does not \
+ support any of the output crate types",
+ unit.pkg,
+ self.target_triple()
+ );
}
info!("Target filenames: {:?}", ret);
/// for that package.
pub fn dep_targets(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
if unit.profile.run_custom_build {
- return self.dep_run_custom_build(unit)
+ return self.dep_run_custom_build(unit);
} else if unit.profile.doc && !unit.profile.test {
return self.doc_deps(unit);
}
let id = unit.pkg.package_id();
let deps = self.resolve.deps(id);
let mut ret = deps.filter(|dep| {
- unit.pkg.dependencies().iter().filter(|d| {
- d.name() == dep.name() && d.version_req().matches(dep.version())
- }).any(|d| {
- // If this target is a build command, then we only want build
- // dependencies, otherwise we want everything *other than* build
- // dependencies.
- if unit.target.is_custom_build() != d.is_build() {
- return false
- }
+ unit.pkg
+ .dependencies()
+ .iter()
+ .filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version()))
+ .any(|d| {
+ // If this target is a build command, then we only want build
+ // dependencies, otherwise we want everything *other than* build
+ // dependencies.
+ if unit.target.is_custom_build() != d.is_build() {
+ return false;
+ }
- // If this dependency is *not* a transitive dependency, then it
- // only applies to test/example targets
- if !d.is_transitive() && !unit.target.is_test() &&
- !unit.target.is_example() && !unit.profile.test {
- return false
- }
+ // If this dependency is *not* a transitive dependency, then it
+ // only applies to test/example targets
+ if !d.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
+ && !unit.profile.test
+ {
+ return false;
+ }
- // If this dependency is only available for certain platforms,
- // make sure we're only enabling it for that platform.
- if !self.dep_platform_activated(d, unit.kind) {
- return false
- }
+ // If this dependency is only available for certain platforms,
+ // make sure we're only enabling it for that platform.
+ if !self.dep_platform_activated(d, unit.kind) {
+ return false;
+ }
- // If the dependency is optional, then we're only activating it
- // if the corresponding feature was activated
- if d.is_optional() && !self.resolve.features(id).contains(&*d.name()) {
- return false;
- }
+ // If the dependency is optional, then we're only activating it
+ // if the corresponding feature was activated
+ if d.is_optional() && !self.resolve.features(id).contains(&*d.name()) {
+ return false;
+ }
- // If we've gotten past all that, then this dependency is
- // actually used!
- true
+ // If we've gotten past all that, then this dependency is
+ // actually used!
+ true
+ })
+ }).filter_map(|id| match self.get_package(id) {
+ Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
+ let unit = Unit {
+ pkg,
+ target: t,
+ profile: self.lib_or_check_profile(unit, t),
+ kind: unit.kind.for_target(t),
+ };
+ Ok(unit)
+ }),
+ Err(e) => Some(Err(e)),
})
- }).filter_map(|id| {
- match self.get_package(id) {
- Ok(pkg) => {
- pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
- let unit = Unit {
- pkg,
- target: t,
- profile: self.lib_or_check_profile(unit, t),
- kind: unit.kind.for_target(t),
- };
- Ok(unit)
- })
- }
- Err(e) => Some(Err(e))
- }
- }).collect::<CargoResult<Vec<_>>>()?;
+ .collect::<CargoResult<Vec<_>>>()?;
// If this target is a build script, then what we've collected so far is
// all we need. If this isn't a build script, then it depends on the
// build script if there is one.
if unit.target.is_custom_build() {
- return Ok(ret)
+ return Ok(ret);
}
ret.extend(self.dep_build_script(unit));
// didn't include `pkg` in the return values, so we need to special case
// it here and see if we need to push `(pkg, pkg_lib_target)`.
if unit.target.is_lib() && !unit.profile.doc {
- return Ok(ret)
+ return Ok(ret);
}
ret.extend(self.maybe_lib(unit));
// Integration tests/benchmarks require binaries to be built
- if unit.profile.test &&
- (unit.target.is_test() || unit.target.is_bench()) {
- ret.extend(unit.pkg.targets().iter().filter(|t| {
- let no_required_features = Vec::new();
-
- t.is_bin() &&
+ if unit.profile.test && (unit.target.is_test() || unit.target.is_bench()) {
+ ret.extend(
+ unit.pkg
+ .targets()
+ .iter()
+ .filter(|t| {
+ let no_required_features = Vec::new();
+
+ t.is_bin() &&
// Skip binaries with required features that have not been selected.
t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
self.resolve.features(id).contains(f)
})
- }).map(|t| {
- Unit {
- pkg: unit.pkg,
- target: t,
- profile: self.lib_or_check_profile(unit, t),
- kind: unit.kind.for_target(t),
- }
- }));
+ })
+ .map(|t| Unit {
+ pkg: unit.pkg,
+ target: t,
+ profile: self.lib_or_check_profile(unit, t),
+ kind: unit.kind.for_target(t),
+ }),
+ );
}
Ok(ret)
}
///
/// The `unit` provided must represent an execution of a build script, and
/// the returned set of units must all be run before `unit` is run.
- pub fn dep_run_custom_build(&self, unit: &Unit<'a>)
- -> CargoResult<Vec<Unit<'a>>> {
+ pub fn dep_run_custom_build(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
// If this build script's execution has been overridden then we don't
// actually depend on anything, we've reached the end of the dependency
// chain as we've got all the info we're gonna get.
let key = (unit.pkg.package_id().clone(), unit.kind);
if self.build_script_overridden.contains(&key) {
- return Ok(Vec::new())
+ return Ok(Vec::new());
}
// When not overridden, then the dependencies to run a build script are:
// 1. Compiling the build script itself
// 2. For each immediate dependency of our package which has a `links`
// key, the execution of that build script.
- let not_custom_build = unit.pkg.targets().iter().find(|t| {
- !t.is_custom_build()
- }).unwrap();
+ let not_custom_build = unit.pkg
+ .targets()
+ .iter()
+ .find(|t| !t.is_custom_build())
+ .unwrap();
let tmp = Unit {
target: not_custom_build,
profile: &self.profiles.dev,
..*unit
};
let deps = self.dep_targets(&tmp)?;
- Ok(deps.iter().filter_map(|unit| {
- if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
- return None
- }
- self.dep_build_script(unit)
- }).chain(Some(Unit {
- profile: self.build_script_profile(unit.pkg.package_id()),
- kind: Kind::Host, // build scripts always compiled for the host
- ..*unit
- })).collect())
+ Ok(deps.iter()
+ .filter_map(|unit| {
+ if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
+ return None;
+ }
+ self.dep_build_script(unit)
+ })
+ .chain(Some(Unit {
+ profile: self.build_script_profile(unit.pkg.package_id()),
+ kind: Kind::Host, // build scripts always compiled for the host
+ ..*unit
+ }))
+ .collect())
}
/// Returns the dependencies necessary to document a package
fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
- let deps = self.resolve.deps(unit.pkg.package_id()).filter(|dep| {
- unit.pkg.dependencies().iter().filter(|d| {
- d.name() == dep.name()
- }).any(|dep| {
- match dep.kind() {
- DepKind::Normal => self.dep_platform_activated(dep,
- unit.kind),
- _ => false,
- }
+ let deps = self.resolve
+ .deps(unit.pkg.package_id())
+ .filter(|dep| {
+ unit.pkg
+ .dependencies()
+ .iter()
+ .filter(|d| d.name() == dep.name())
+ .any(|dep| match dep.kind() {
+ DepKind::Normal => self.dep_platform_activated(dep, unit.kind),
+ _ => false,
+ })
})
- }).map(|dep| {
- self.get_package(dep)
- });
+ .map(|dep| self.get_package(dep));
// To document a library, we depend on dependencies actually being
// built. If we're documenting *all* libraries, then we also depend on
/// of work is still returned. `None` is only returned if the package has no
/// build script.
fn dep_build_script(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
- unit.pkg.targets().iter().find(|t| t.is_custom_build()).map(|t| {
- Unit {
+ unit.pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_custom_build())
+ .map(|t| Unit {
pkg: unit.pkg,
target: t,
profile: &self.profiles.custom_build,
kind: unit.kind,
- }
- })
+ })
}
fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
- unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
- Unit {
+ unit.pkg
+ .targets()
+ .iter()
+ .find(|t| t.linkable())
+ .map(|t| Unit {
pkg: unit.pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
- }
- })
+ })
}
fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
}
/// Number of jobs specified for this build
- pub fn jobs(&self) -> u32 { self.build_config.jobs }
+ pub fn jobs(&self) -> u32 {
+ self.build_config.jobs
+ }
pub fn lib_profile(&self) -> &'a Profile {
let (normal, test) = if self.build_config.release {
pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile {
if !target.is_custom_build() && !target.for_host()
- && (unit.profile.check || (unit.profile.doc && !unit.profile.test)) {
- return &self.profiles.check
+ && (unit.profile.check || (unit.profile.doc && !unit.profile.test))
+ {
+ return &self.profiles.check;
}
self.lib_profile()
}
};
if !incremental {
- return Ok(Vec::new())
+ return Ok(Vec::new());
}
// Only enable incremental compilation for sources the user can
//
// (see also https://github.com/rust-lang/cargo/issues/3972)
if !unit.pkg.package_id().source_id().is_path() {
- return Ok(Vec::new())
+ return Ok(Vec::new());
}
let dir = self.layout(unit.kind).incremental().display();
- Ok(vec![
- "-C".to_string(),
- format!("incremental={}", dir),
- ])
+ Ok(vec!["-C".to_string(), format!("incremental={}", dir)])
}
pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
- env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTFLAGS")
+ env_args(
+ self.config,
+ &self.build_config,
+ self.info(&unit.kind),
+ unit.kind,
+ "RUSTFLAGS",
+ )
}
pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
- env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTDOCFLAGS")
+ env_args(
+ self.config,
+ &self.build_config,
+ self.info(&unit.kind),
+ unit.kind,
+ "RUSTDOCFLAGS",
+ )
}
pub fn show_warnings(&self, pkg: &PackageId) -> bool {
///
/// Note that if a `target` is specified, no args will be passed to host code (plugins, build
/// scripts, ...), even if it is the same as the target.
-fn env_args(config: &Config,
- build_config: &BuildConfig,
- target_info: &TargetInfo,
- kind: Kind,
- name: &str) -> CargoResult<Vec<String>> {
+fn env_args(
+ config: &Config,
+ build_config: &BuildConfig,
+ target_info: &TargetInfo,
+ kind: Kind,
+ name: &str,
+) -> CargoResult<Vec<String>> {
// We *want* to apply RUSTFLAGS only to builds for the
// requested target architecture, and not to things like build
// scripts and plugins, which may be for an entirely different
let mut rustflags = Vec::new();
- let name = name.chars().flat_map(|c| c.to_lowercase()).collect::<String>();
+ let name = name.chars()
+ .flat_map(|c| c.to_lowercase())
+ .collect::<String>();
// Then the target.*.rustflags value...
- let target = build_config.requested_target.as_ref().unwrap_or(&build_config.host_triple);
+ let target = build_config
+ .requested_target
+ .as_ref()
+ .unwrap_or(&build_config.host_triple);
let key = format!("target.{}.{}", target, name);
if let Some(args) = config.get_list_or_split_string(&key)? {
let args = args.val.into_iter();
let cfgs = table.val.keys().filter_map(|t| {
if t.starts_with("cfg(") && t.ends_with(')') {
let cfg = &t[4..t.len() - 1];
- CfgExpr::from_str(cfg)
- .ok()
- .and_then(|c| if c.matches(target_cfg) { Some(t) } else { None })
+ CfgExpr::from_str(cfg).ok().and_then(|c| {
+ if c.matches(target_cfg) {
+ Some(t)
+ } else {
+ None
+ }
+ })
} else {
None
}
lines: &mut str::Lines,
) -> CargoResult<Option<(String, String)>> {
let not_supported = error.lines().any(|line| {
- (line.contains("unsupported crate type") ||
- line.contains("unknown crate type")) &&
- line.contains(crate_type)
+ (line.contains("unsupported crate type") || line.contains("unknown crate type"))
+ && line.contains(crate_type)
});
if not_supported {
return Ok(None);
}
let line = match lines.next() {
Some(line) => line,
- None => bail!("malformed output when learning about \
- crate-type {} information", crate_type),
+ None => bail!(
+ "malformed output when learning about \
+ crate-type {} information",
+ crate_type
+ ),
};
let mut parts = line.trim().split("___");
let prefix = parts.next().unwrap();
let suffix = match parts.next() {
Some(part) => part,
- None => bail!("output of --print=file-names has changed in \
- the compiler, cannot parse"),
+ None => bail!(
+ "output of --print=file-names has changed in \
+ the compiler, cannot parse"
+ ),
};
Ok(Some((prefix.to_string(), suffix.to_string())))
let mut ret = vec![(suffix.to_string(), file_type, false)];
// rust-lang/cargo#4500
- if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib") &&
- suffix == ".dll"
+ if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib")
+ && suffix == ".dll"
{
ret.push((".dll.lib".to_string(), TargetFileType::Normal, false));
}
// rust-lang/cargo#4535
- if target_triple.starts_with("wasm32-") && crate_type == "bin" &&
- suffix == ".js"
- {
+ if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" {
ret.push((".wasm".to_string(), TargetFileType::Normal, true));
}
-use std::collections::{HashMap, BTreeSet, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
use std::fs;
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
use std::str;
-use std::sync::{Mutex, Arc};
+use std::sync::{Arc, Mutex};
use core::PackageId;
-use util::{Freshness, Cfg};
+use util::{Cfg, Freshness};
use util::errors::{CargoResult, CargoResultExt};
-use util::{self, internal, profile, paths};
+use util::{self, internal, paths, profile};
use util::machine_message;
use super::job::Work;
-use super::{fingerprint, Kind, Context, Unit};
+use super::{fingerprint, Context, Kind, Unit};
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
-pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
- -> CargoResult<(Work, Work, Freshness)> {
- let _p = profile::start(format!("build script prepare: {}/{}",
- unit.pkg, unit.target.name()));
+pub fn prepare<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+) -> CargoResult<(Work, Work, Freshness)> {
+ let _p = profile::start(format!(
+ "build script prepare: {}/{}",
+ unit.pkg,
+ unit.target.name()
+ ));
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_script_overridden.contains(&key);
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
- let (freshness, dirty, fresh) =
- fingerprint::prepare_build_cmd(cx, unit)?;
+ let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
-fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
- -> CargoResult<(Work, Work)> {
+fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> {
let dependencies = cx.dep_run_custom_build(unit)?;
- let build_script_unit = dependencies.iter().find(|d| {
- !d.profile.run_custom_build && d.target.is_custom_build()
- }).expect("running a script not depending on an actual script");
+ let build_script_unit = dependencies
+ .iter()
+ .find(|d| !d.profile.run_custom_build && d.target.is_custom_build())
+ .expect("running a script not depending on an actual script");
let script_output = cx.build_script_dir(build_script_unit);
let build_output = cx.build_script_out_dir(unit);
let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
cmd.env("OUT_DIR", &build_output)
- .env("CARGO_MANIFEST_DIR", unit.pkg.root())
- .env("NUM_JOBS", &cx.jobs().to_string())
- .env("TARGET", &match unit.kind {
- Kind::Host => cx.host_triple(),
- Kind::Target => cx.target_triple(),
- })
- .env("DEBUG", &profile.debuginfo.is_some().to_string())
- .env("OPT_LEVEL", &profile.opt_level)
- .env("PROFILE", if cx.build_config.release { "release" } else { "debug" })
- .env("HOST", cx.host_triple())
- .env("RUSTC", &cx.config.rustc()?.path)
- .env("RUSTDOC", &*cx.config.rustdoc()?)
- .inherit_jobserver(&cx.jobserver);
+ .env("CARGO_MANIFEST_DIR", unit.pkg.root())
+ .env("NUM_JOBS", &cx.jobs().to_string())
+ .env(
+ "TARGET",
+ &match unit.kind {
+ Kind::Host => cx.host_triple(),
+ Kind::Target => cx.target_triple(),
+ },
+ )
+ .env("DEBUG", &profile.debuginfo.is_some().to_string())
+ .env("OPT_LEVEL", &profile.opt_level)
+ .env(
+ "PROFILE",
+ if cx.build_config.release {
+ "release"
+ } else {
+ "debug"
+ },
+ )
+ .env("HOST", cx.host_triple())
+ .env("RUSTC", &cx.config.rustc()?.path)
+ .env("RUSTDOC", &*cx.config.rustdoc()?)
+ .inherit_jobserver(&cx.jobserver);
if let Some(links) = unit.pkg.manifest().links() {
cmd.env("CARGO_MANIFEST_LINKS", links);
let mut cfg_map = HashMap::new();
for cfg in cx.cfg(unit.kind) {
match *cfg {
- Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); }
+ Cfg::Name(ref n) => {
+ cfg_map.insert(n.clone(), None);
+ }
Cfg::KeyPair(ref k, ref v) => {
- if let Some(ref mut values) = *cfg_map.entry(k.clone()).or_insert_with(||Some(Vec::new())) {
+ if let Some(ref mut values) =
+ *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new()))
+ {
values.push(v.clone())
}
}
for (k, v) in cfg_map {
let k = format!("CARGO_CFG_{}", super::envify(&k));
match v {
- Some(list) => { cmd.env(&k, list.join(",")); }
- None => { cmd.env(&k, ""); }
+ Some(list) => {
+ cmd.env(&k, list.join(","));
+ }
+ None => {
+ cmd.env(&k, "");
+ }
}
}
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
- dependencies.iter().filter_map(|unit| {
- if unit.profile.run_custom_build {
- Some((unit.pkg.manifest().links().unwrap().to_string(),
- unit.pkg.package_id().clone()))
- } else {
- None
- }
- }).collect::<Vec<_>>()
+ dependencies
+ .iter()
+ .filter_map(|unit| {
+ if unit.profile.run_custom_build {
+ Some((
+ unit.pkg.manifest().links().unwrap().to_string(),
+ unit.pkg.package_id().clone(),
+ ))
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = Arc::clone(&cx.build_state);
(output_file, err_file, root_output_file)
};
let root_output = cx.target_root().to_path_buf();
- let all = (id.clone(), pkg_name.clone(), Arc::clone(&build_state),
- output_file.clone(), root_output.clone());
+ let all = (
+ id.clone(),
+ pkg_name.clone(),
+ Arc::clone(&build_state),
+ output_file.clone(),
+ root_output.clone(),
+ );
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
let json_messages = cx.build_config.json_messages;
let prev_root_output = paths::read_bytes(&root_output_file)
.and_then(|bytes| util::bytes2path(&bytes))
.unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf());
- let prev_output = BuildOutput::parse_file(
- &output_file,
- &pkg_name,
- &prev_root_output,
- &root_output,
- ).ok();
+ let prev_output =
+ BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output).ok();
let deps = BuildDeps::new(&output_file, prev_output.as_ref());
cx.build_explicit_deps.insert(*unit, deps);
// otherwise create it!
if fs::metadata(&build_output).is_err() {
fs::create_dir(&build_output).chain_err(|| {
- internal("failed to create script output directory for \
- build command")
+ internal(
+ "failed to create script output directory for \
+ build command",
+ )
})?;
}
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = build_state.get(&key).ok_or_else(|| {
- internal(format!("failed to locate build state for env \
- vars: {}/{:?}", id, kind))
+ internal(format!(
+ "failed to locate build state for env \
+ vars: {}/{:?}",
+ id, kind
+ ))
})?;
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
- cmd.env(&format!("DEP_{}_{}", super::envify(&name),
- super::envify(key)), value);
+ cmd.env(
+ &format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
+ value,
+ );
}
}
if let Some(build_scripts) = build_scripts {
- super::add_plugin_deps(&mut cmd, &build_state,
- &build_scripts,
- &root_output)?;
+ super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &root_output)?;
}
}
// And now finally, run the build command itself!
state.running(&cmd);
let output = cmd.exec_with_streaming(
- &mut |out_line| { state.stdout(out_line); Ok(()) },
- &mut |err_line| { state.stderr(err_line); Ok(()) },
+ &mut |out_line| {
+ state.stdout(out_line);
+ Ok(())
+ },
+ &mut |err_line| {
+ state.stderr(err_line);
+ Ok(())
+ },
true,
).map_err(|e| {
- format_err!("failed to run custom build command for `{}`\n{}",
- pkg_name, e)
-
+ format_err!(
+ "failed to run custom build command for `{}`\n{}",
+ pkg_name,
+ e
+ )
})?;
-
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
paths::write(&output_file, &output.stdout)?;
paths::write(&err_file, &output.stderr)?;
paths::write(&root_output_file, util::path2bytes(&root_output)?)?;
- let parsed_output = BuildOutput::parse(
- &output.stdout,
- &pkg_name,
- &root_output,
- &root_output,
- )?;
+ let parsed_output =
+ BuildOutput::parse(&output.stdout, &pkg_name, &root_output, &root_output)?;
if json_messages {
- let library_paths = parsed_output.library_paths.iter().map(|l| {
- l.display().to_string()
- }).collect::<Vec<_>>();
+ let library_paths = parsed_output
+ .library_paths
+ .iter()
+ .map(|l| l.display().to_string())
+ .collect::<Vec<_>>();
machine_message::emit(&machine_message::BuildScript {
package_id: &id,
linked_libs: &parsed_output.library_links,
let output = match prev_output {
Some(output) => output,
None => {
- BuildOutput::parse_file(
- &output_file,
- &pkg_name,
- &prev_root_output,
- &root_output,
- )?
+ BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output)?
}
};
build_state.insert(id, kind, output);
}
impl BuildOutput {
- pub fn parse_file(path: &Path,
- pkg_name: &str,
- root_output_when_generated: &Path,
- root_output: &Path) -> CargoResult<BuildOutput> {
+ pub fn parse_file(
+ path: &Path,
+ pkg_name: &str,
+ root_output_when_generated: &Path,
+ root_output: &Path,
+ ) -> CargoResult<BuildOutput> {
let contents = paths::read_bytes(path)?;
BuildOutput::parse(&contents, pkg_name, root_output_when_generated, root_output)
}
// Parses the output of a script.
// The `pkg_name` is used for error messages.
- pub fn parse(input: &[u8],
- pkg_name: &str,
- root_output_when_generated: &Path,
- root_output: &Path) -> CargoResult<BuildOutput> {
+ pub fn parse(
+ input: &[u8],
+ pkg_name: &str,
+ root_output_when_generated: &Path,
+ root_output: &Path,
+ ) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
}
let data = match iter.next() {
Some(val) => val,
- None => continue
+ None => continue,
};
// getting the `key=value` part of the line
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
- let path = |val: &str| {
- match Path::new(val).strip_prefix(root_output_when_generated) {
- Ok(path) => root_output.join(path),
- Err(_) => PathBuf::from(val),
- }
+ let path = |val: &str| match Path::new(val).strip_prefix(root_output_when_generated) {
+ Ok(path) => root_output.join(path),
+ Err(_) => PathBuf::from(val),
};
match key {
"rustc-flags" => {
- let (paths, links) =
- BuildOutput::parse_rustc_flags(value, &whence)?;
+ let (paths, links) = BuildOutput::parse_rustc_flags(value, &whence)?;
library_links.extend(links.into_iter());
library_paths.extend(paths.into_iter());
}
})
}
- pub fn parse_rustc_flags(value: &str, whence: &str)
- -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
+ pub fn parse_rustc_flags(
+ value: &str,
+ whence: &str,
+ ) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
- let mut flags_iter = value.split(|c: char| c.is_whitespace())
- .filter(|w| w.chars().any(|c| !c.is_whitespace()));
+ let mut flags_iter = value
+ .split(|c: char| c.is_whitespace())
+ .filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
while let Some(flag) = flags_iter.next() {
if flag != "-l" && flag != "-L" {
- bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
- whence, value)
+ bail!(
+ "Only `-l` and `-L` flags are allowed in {}: `{}`",
+ whence,
+ value
+ )
}
let value = match flags_iter.next() {
Some(v) => v,
- None => bail!("Flag in rustc-flags has no value in {}: `{}`",
- whence, value)
+ None => bail!(
+ "Flag in rustc-flags has no value in {}: `{}`",
+ whence,
+ value
+ ),
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
- _ => bail!("only -l and -L flags are allowed")
+ _ => bail!("only -l and -L flags are allowed"),
};
}
Ok((library_paths, library_links))
}
- pub fn parse_rustc_env(value: &str, whence: &str)
- -> CargoResult<(String, String)> {
+ pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
let mut iter = value.splitn(2, '=');
let name = iter.next();
let val = iter.next();
pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
BuildDeps {
build_script_output: output_file.to_path_buf(),
- rerun_if_changed: output.map(|p| &p.rerun_if_changed)
- .cloned()
- .unwrap_or_default(),
- rerun_if_env_changed: output.map(|p| &p.rerun_if_env_changed)
- .cloned()
- .unwrap_or_default(),
+ rerun_if_changed: output
+ .map(|p| &p.rerun_if_changed)
+ .cloned()
+ .unwrap_or_default(),
+ rerun_if_env_changed: output
+ .map(|p| &p.rerun_if_env_changed)
+ .cloned()
+ .unwrap_or_default(),
}
}
}
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
-pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
- units: &[Unit<'b>])
- -> CargoResult<()> {
+pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit)?;
}
- cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
- (k, Arc::new(v))
- }));
+ cx.build_scripts
+ .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
return Ok(());
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
- fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
- cx: &mut Context<'b, 'cfg>,
- unit: &Unit<'b>)
- -> CargoResult<&'a BuildScripts> {
+ fn build<'a, 'b, 'cfg>(
+ out: &'a mut HashMap<Unit<'b>, BuildScripts>,
+ cx: &mut Context<'b, 'cfg>,
+ unit: &Unit<'b>,
+ ) -> CargoResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
- return Ok(&out[unit])
+ return Ok(&out[unit]);
}
{
- let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
+ let key = unit.pkg
+ .manifest()
+ .links()
+ .map(|l| (l.to_string(), unit.kind));
let build_state = &cx.build_state;
if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
let key = (unit.pkg.package_id().clone(), unit.kind);
let dep_scripts = build(out, cx, unit)?;
if unit.target.for_host() {
- ret.plugins.extend(dep_scripts.to_link.iter()
- .map(|p| &p.0).cloned());
+ ret.plugins
+ .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
} else if unit.target.linkable() {
for &(ref pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind);
use core::{Epoch, Package, TargetKind};
use util;
-use util::{Fresh, Dirty, Freshness, internal, profile};
+use util::{internal, profile, Dirty, Fresh, Freshness};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use super::job::Work;
-use super::context::{Context, Unit, TargetFileType};
+use super::context::{Context, TargetFileType, Unit};
use super::custom_build::BuildDeps;
/// A tuple result of the `prepare_foo` functions in this module.
/// This function will calculate the fingerprint for a target and prepare the
/// work necessary to either write the fingerprint or copy over all fresh files
/// from the old directories to their new locations.
-pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- unit: &Unit<'a>) -> CargoResult<Preparation> {
- let _p = profile::start(format!("fingerprint: {} / {}",
- unit.pkg.package_id(), unit.target.name()));
+pub fn prepare_target<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+) -> CargoResult<Preparation> {
+ let _p = profile::start(format!(
+ "fingerprint: {} / {}",
+ unit.pkg.package_id(),
+ unit.target.name()
+ ));
let new = cx.fingerprint_dir(unit);
let loc = new.join(&filename(cx, unit));
if compare.is_err() {
let source_id = unit.pkg.package_id().source_id();
let sources = cx.packages.sources();
- let source = sources.get(source_id).ok_or_else(|| {
- internal("missing package source")
- })?;
+ let source = sources
+ .get(source_id)
+ .ok_or_else(|| internal("missing package source"))?;
source.verify(unit.pkg.package_id())?;
}
let mut missing_outputs = false;
if unit.profile.doc {
missing_outputs = !root.join(unit.target.crate_name())
- .join("index.html").exists();
+ .join("index.html")
+ .exists();
} else {
for &(ref src, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() {
if file_type == TargetFileType::DebugInfo {
match fingerprint.update_local(&target_root) {
Ok(()) => {}
Err(..) if allow_failure => return Ok(()),
- Err(e) => return Err(e)
+ Err(e) => return Err(e),
}
write_fingerprint(&loc, &*fingerprint)
});
let fresh = compare.is_ok() && !missing_outputs;
- Ok((if fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop()))
+ Ok((
+ if fresh { Fresh } else { Dirty },
+ write_fingerprint,
+ Work::noop(),
+ ))
}
/// A fingerprint can be considered to be a "short string" representing the
#[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")]
deps: Vec<(String, Arc<Fingerprint>)>,
local: Vec<LocalFingerprint>,
- #[serde(skip_serializing, skip_deserializing)]
- memoized_hash: Mutex<Option<u64>>,
+ #[serde(skip_serializing, skip_deserializing)] memoized_hash: Mutex<Option<u64>>,
rustflags: Vec<String>,
epoch: Epoch,
}
-fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S)
- -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S) -> Result<S::Ok, S::Error>
+where
+ S: ser::Serializer,
{
- deps.iter().map(|&(ref a, ref b)| {
- (a, b.hash())
- }).collect::<Vec<_>>().serialize(ser)
+ deps.iter()
+ .map(|&(ref a, ref b)| (a, b.hash()))
+ .collect::<Vec<_>>()
+ .serialize(ser)
}
fn deserialize_deps<'de, D>(d: D) -> Result<Vec<(String, Arc<Fingerprint>)>, D::Error>
- where D: de::Deserializer<'de>,
+where
+ D: de::Deserializer<'de>,
{
let decoded = <Vec<(String, u64)>>::deserialize(d)?;
- Ok(decoded.into_iter().map(|(name, hash)| {
- (name, Arc::new(Fingerprint {
- rustc: 0,
- target: 0,
- profile: 0,
- path: 0,
- local: vec![LocalFingerprint::Precalculated(String::new())],
- features: String::new(),
- deps: Vec::new(),
- memoized_hash: Mutex::new(Some(hash)),
- epoch: Epoch::Epoch2015,
- rustflags: Vec::new(),
- }))
- }).collect())
+ Ok(decoded
+ .into_iter()
+ .map(|(name, hash)| {
+ (
+ name,
+ Arc::new(Fingerprint {
+ rustc: 0,
+ target: 0,
+ profile: 0,
+ path: 0,
+ local: vec![LocalFingerprint::Precalculated(String::new())],
+ features: String::new(),
+ deps: Vec::new(),
+ memoized_hash: Mutex::new(Some(hash)),
+ epoch: Epoch::Epoch2015,
+ rustflags: Vec::new(),
+ }),
+ )
+ })
+ .collect())
}
#[derive(Serialize, Deserialize, Hash)]
}
impl LocalFingerprint {
- fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path)
- -> LocalFingerprint
- {
+ fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path) -> LocalFingerprint {
let mtime = MtimeSlot(Mutex::new(mtime));
assert!(path.is_absolute());
let path = path.strip_prefix(root).unwrap_or(path);
LocalFingerprint::MtimeBased(ref slot, ref path) => {
let path = root.join(path);
let meta = fs::metadata(&path)
- .chain_err(|| {
- internal(format!("failed to stat `{}`", path.display()))
- })?;
+ .chain_err(|| internal(format!("failed to stat `{}`", path.display())))?;
let mtime = FileTime::from_last_modification_time(&meta);
*slot.0.lock().unwrap() = Some(mtime);
}
- LocalFingerprint::EnvBased(..) |
- LocalFingerprint::Precalculated(..) => continue,
+ LocalFingerprint::EnvBased(..) | LocalFingerprint::Precalculated(..) => continue,
}
hash_busted = true;
}
fn hash(&self) -> u64 {
if let Some(s) = *self.memoized_hash.lock().unwrap() {
- return s
+ return s;
}
let ret = util::hash_u64(self);
*self.memoized_hash.lock().unwrap() = Some(ret);
bail!("rust compiler has changed")
}
if self.features != old.features {
- bail!("features have changed: {} != {}", self.features, old.features)
+ bail!(
+ "features have changed: {} != {}",
+ self.features,
+ old.features
+ )
}
if self.target != old.target {
bail!("target configuration has changed")
}
for (new, old) in self.local.iter().zip(&old.local) {
match (new, old) {
- (&LocalFingerprint::Precalculated(ref a),
- &LocalFingerprint::Precalculated(ref b)) => {
+ (
+ &LocalFingerprint::Precalculated(ref a),
+ &LocalFingerprint::Precalculated(ref b),
+ ) => {
if a != b {
- bail!("precalculated components have changed: {} != {}",
- a, b)
+ bail!("precalculated components have changed: {} != {}", a, b)
}
}
- (&LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
- &LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp)) => {
+ (
+ &LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
+ &LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp),
+ ) => {
let on_disk_mtime = on_disk_mtime.0.lock().unwrap();
let previously_built_mtime = previously_built_mtime.0.lock().unwrap();
};
if should_rebuild {
- bail!("mtime based components have changed: previously {:?} now {:?}, \
- paths are {:?} and {:?}",
- *previously_built_mtime, *on_disk_mtime, ap, bp)
+ bail!(
+ "mtime based components have changed: previously {:?} now {:?}, \
+ paths are {:?} and {:?}",
+ *previously_built_mtime,
+ *on_disk_mtime,
+ ap,
+ bp
+ )
}
}
- (&LocalFingerprint::EnvBased(ref akey, ref avalue),
- &LocalFingerprint::EnvBased(ref bkey, ref bvalue)) => {
+ (
+ &LocalFingerprint::EnvBased(ref akey, ref avalue),
+ &LocalFingerprint::EnvBased(ref bkey, ref bvalue),
+ ) => {
if *akey != *bkey {
bail!("env vars changed: {} != {}", akey, bkey);
}
if *avalue != *bvalue {
- bail!("env var `{}` changed: previously {:?} now {:?}",
- akey, bvalue, avalue)
+ bail!(
+ "env var `{}` changed: previously {:?} now {:?}",
+ akey,
+ bvalue,
+ avalue
+ )
}
}
_ => bail!("local fingerprint type has changed"),
ref rustflags,
..
} = *self;
- (rustc, features, target, path, profile, local, epoch, rustflags).hash(h);
+ (
+ rustc,
+ features,
+ target,
+ path,
+ profile,
+ local,
+ epoch,
+ rustflags,
+ ).hash(h);
h.write_usize(deps.len());
for &(ref name, ref fingerprint) in deps {
impl ser::Serialize for MtimeSlot {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
- self.0.lock().unwrap().map(|ft| {
- (ft.seconds_relative_to_1970(), ft.nanoseconds())
- }).serialize(s)
+ self.0
+ .lock()
+ .unwrap()
+ .map(|ft| (ft.seconds_relative_to_1970(), ft.nanoseconds()))
+ .serialize(s)
}
}
impl<'de> de::Deserialize<'de> for MtimeSlot {
fn deserialize<D>(d: D) -> Result<MtimeSlot, D::Error>
- where D: de::Deserializer<'de>,
+ where
+ D: de::Deserializer<'de>,
{
let kind: Option<(u64, u32)> = de::Deserialize::deserialize(d)?;
Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
///
/// Information like file modification time is only calculated for path
/// dependencies and is calculated in `calculate_target_fresh`.
-fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
- -> CargoResult<Arc<Fingerprint>> {
+fn calculate<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+) -> CargoResult<Arc<Fingerprint>> {
if let Some(s) = cx.fingerprints.get(unit) {
- return Ok(Arc::clone(s))
+ return Ok(Arc::clone(s));
}
// Next, recursively calculate the fingerprint for all of our dependencies.
// induce a recompile, they're just dependencies in the sense that they need
// to be built.
let deps = cx.dep_targets(unit)?;
- let deps = deps.iter().filter(|u| {
- !u.target.is_custom_build() && !u.target.is_bin()
- }).map(|unit| {
- calculate(cx, unit).map(|fingerprint| {
- (unit.pkg.package_id().to_string(), fingerprint)
+ let deps = deps.iter()
+ .filter(|u| !u.target.is_custom_build() && !u.target.is_bin())
+ .map(|unit| {
+ calculate(cx, unit).map(|fingerprint| (unit.pkg.package_id().to_string(), fingerprint))
})
- }).collect::<CargoResult<Vec<_>>>()?;
+ .collect::<CargoResult<Vec<_>>>()?;
// And finally, calculate what our own local fingerprint is
let local = if use_dep_info(unit) {
Ok(fingerprint)
}
-
// We want to use the mtime for files if we're a path source, but if we're a
// git/registry source, then the mtime of files may fluctuate, but they won't
// change so long as the source itself remains constant (which is the
///
/// The currently implemented solution is option (1), although it is planned to
/// migrate to option (2) in the near future.
-pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
- -> CargoResult<Preparation> {
- let _p = profile::start(format!("fingerprint build cmd: {}",
- unit.pkg.package_id()));
+pub fn prepare_build_cmd<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+) -> CargoResult<Preparation> {
+ let _p = profile::start(format!("fingerprint build cmd: {}", unit.pkg.package_id()));
let new = cx.fingerprint_dir(unit);
let loc = new.join("build");
if let Some(output_path) = output_path {
let outputs = state.outputs.lock().unwrap();
let outputs = &outputs[&key];
- if !outputs.rerun_if_changed.is_empty() ||
- !outputs.rerun_if_env_changed.is_empty() {
+ if !outputs.rerun_if_changed.is_empty() || !outputs.rerun_if_env_changed.is_empty() {
let deps = BuildDeps::new(&output_path, Some(outputs));
fingerprint.local = local_fingerprints_deps(&deps, &target_root, &pkg_root);
fingerprint.update_local(&target_root)?;
write_fingerprint(&loc, &fingerprint)
});
- Ok((if compare.is_ok() {Fresh} else {Dirty}, write_fingerprint, Work::noop()))
+ Ok((
+ if compare.is_ok() { Fresh } else { Dirty },
+ write_fingerprint,
+ Work::noop(),
+ ))
}
-fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- unit: &Unit<'a>)
- -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)>
-{
+fn build_script_local_fingerprints<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+) -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> {
let state = cx.build_state.outputs.lock().unwrap();
// First up, if this build script is entirely overridden, then we just
// return the hash of what we overrode it with.
// fingerprint afterwards because this is all just overridden.
if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
debug!("override local fingerprints deps");
- let s = format!("overridden build state with hash: {}",
- util::hash_u64(output));
- return Ok((vec![LocalFingerprint::Precalculated(s)], None))
+ let s = format!(
+ "overridden build state with hash: {}",
+ util::hash_u64(output)
+ );
+ return Ok((vec![LocalFingerprint::Precalculated(s)], None));
}
// Next up we look at the previously listed dependencies for the build
if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
debug!("old local fingerprints deps");
let s = pkg_fingerprint(cx, unit.pkg)?;
- return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)))
+ return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)));
}
// Ok so now we're in "new mode" where we can have files listed as
// dependencies as well as env vars listed as dependencies. Process them all
// here.
- Ok((local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()), Some(output)))
+ Ok((
+ local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()),
+ Some(output),
+ ))
}
-fn local_fingerprints_deps(deps: &BuildDeps, target_root: &Path, pkg_root: &Path)
- -> Vec<LocalFingerprint>
-{
+fn local_fingerprints_deps(
+ deps: &BuildDeps,
+ target_root: &Path,
+ pkg_root: &Path,
+) -> Vec<LocalFingerprint> {
debug!("new local fingerprints deps");
let mut local = Vec::new();
if !deps.rerun_if_changed.is_empty() {
let hash = fingerprint.hash();
debug!("write fingerprint: {}", loc.display());
paths::write(loc, util::to_hex(hash).as_bytes())?;
- paths::write(&loc.with_extension("json"),
- &serde_json::to_vec(&fingerprint).unwrap())?;
+ paths::write(
+ &loc.with_extension("json"),
+ &serde_json::to_vec(&fingerprint).unwrap(),
+ )?;
Ok(())
}
}
pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
- cx.fingerprint_dir(unit).join(&format!("dep-{}", filename(cx, unit)))
+ cx.fingerprint_dir(unit)
+ .join(&format!("dep-{}", filename(cx, unit)))
}
-fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint)
- -> CargoResult<()> {
+fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> {
let old_fingerprint_short = paths::read(loc)?;
let new_hash = new_fingerprint.hash();
if util::to_hex(new_hash) == old_fingerprint_short {
- return Ok(())
+ return Ok(());
}
let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
}
// Parse the dep-info into a list of paths
-pub fn parse_dep_info(pkg: &Package, dep_info: &Path)
- -> CargoResult<Option<Vec<PathBuf>>>
-{
+pub fn parse_dep_info(pkg: &Package, dep_info: &Path) -> CargoResult<Option<Vec<PathBuf>>> {
let data = match paths::read_bytes(dep_info) {
Ok(data) => data,
Err(_) => return Ok(None),
}
}
-fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path)
- -> CargoResult<Option<FileTime>>
-{
+fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<Option<FileTime>> {
if let Some(paths) = parse_dep_info(pkg, dep_info)? {
Ok(mtime_if_fresh(dep_info, paths.iter()))
} else {
let source_id = pkg.package_id().source_id();
let sources = cx.packages.sources();
- let source = sources.get(source_id).ok_or_else(|| {
- internal("missing package source")
- })?;
+ let source = sources
+ .get(source_id)
+ .ok_or_else(|| internal("missing package source"))?;
source.fingerprint(pkg)
}
fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
- where I: IntoIterator,
- I::Item: AsRef<Path>,
+where
+ I: IntoIterator,
+ I::Item: AsRef<Path>,
{
let meta = match fs::metadata(output) {
Ok(meta) => meta,
Ok(meta) => meta,
Err(..) => {
info!("stale: {} -- missing", path.display());
- return true
+ return true;
}
};
let mtime2 = FileTime::from_last_modification_time(&meta);
TargetKind::Lib(..) => "lib",
TargetKind::Bin => "bin",
TargetKind::Test => "integration-test",
- TargetKind::ExampleBin |
- TargetKind::ExampleLib(..) => "example",
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example",
TargetKind::Bench => "bench",
TargetKind::CustomBuild => "build-script",
};
///
/// The serialized Cargo format will contain a list of files, all of which are
/// relative if they're under `root`, or absolute if they're elsewhere.
-pub fn translate_dep_info(rustc_dep_info: &Path,
- cargo_dep_info: &Path,
- pkg_root: &Path,
- rustc_cwd: &Path) -> CargoResult<()> {
+pub fn translate_dep_info(
+ rustc_dep_info: &Path,
+ cargo_dep_info: &Path,
+ pkg_root: &Path,
+ rustc_cwd: &Path,
+) -> CargoResult<()> {
let target = parse_rustc_dep_info(rustc_dep_info)?;
- let deps = &target.get(0).ok_or_else(|| {
- internal("malformed dep-info format, no targets".to_string())
- })?.1;
+ let deps = &target
+ .get(0)
+ .ok_or_else(|| internal("malformed dep-info format, no targets".to_string()))?
+ .1;
let mut new_contents = Vec::new();
for file in deps {
Ok(())
}
-pub fn parse_rustc_dep_info(rustc_dep_info: &Path)
- -> CargoResult<Vec<(String, Vec<String>)>>
-{
+pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<Vec<(String, Vec<String>)>> {
let contents = paths::read(rustc_dep_info)?;
- contents.lines()
+ contents
+ .lines()
.filter_map(|l| l.find(": ").map(|i| (l, i)))
.map(|(line, pos)| {
let target = &line[..pos];
use std::fmt;
-use util::{CargoResult, Fresh, Dirty, Freshness};
+use util::{CargoResult, Dirty, Fresh, Freshness};
use super::job_queue::JobState;
-pub struct Job { dirty: Work, fresh: Work }
+pub struct Job {
+ dirty: Work,
+ fresh: Work,
+}
/// Each proc should send its description before starting.
/// It should send either once or close immediately.
pub struct Work {
- inner: Box<for <'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
+ inner: Box<for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
}
trait FnBox<A, R> {
impl Work {
pub fn new<F>(f: F) -> Work
- where F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static
+ where
+ F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static,
{
Work { inner: Box::new(f) }
}
use std::fmt;
use std::io;
use std::mem;
-use std::sync::mpsc::{channel, Sender, Receiver};
+use std::sync::mpsc::{channel, Receiver, Sender};
use crossbeam::{self, Scope};
use jobserver::{Acquired, HelperThread};
-use core::{PackageId, Target, Profile};
-use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
-use util::{CargoResult, ProcessBuilder, profile, internal, CargoResultExt};
-use {handle_error};
+use core::{PackageId, Profile, Target};
+use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
+use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
+use handle_error;
use super::{Context, Kind, Unit};
use super::job::Job;
}
}
- pub fn enqueue<'cfg>(&mut self,
- cx: &Context<'a, 'cfg>,
- unit: &Unit<'a>,
- job: Job,
- fresh: Freshness) -> CargoResult<()> {
+ pub fn enqueue<'cfg>(
+ &mut self,
+ cx: &Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+ job: Job,
+ fresh: Freshness,
+ ) -> CargoResult<()> {
let key = Key::new(unit);
let deps = key.dependencies(cx)?;
- self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh));
+ self.queue
+ .queue(Fresh, key, Vec::new(), &deps)
+ .push((job, fresh));
*self.counts.entry(key.pkg).or_insert(0) += 1;
Ok(())
}
// As a result, this `transmute` to a longer lifetime should be safe in
// practice.
let tx = self.tx.clone();
- let tx = unsafe {
- mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx)
- };
- let helper = cx.jobserver.clone().into_helper_thread(move |token| {
- drop(tx.send(Message::Token(token)));
- }).chain_err(|| {
- "failed to create helper thread for jobserver management"
- })?;
-
- crossbeam::scope(|scope| {
- self.drain_the_queue(cx, scope, &helper)
- })
+ let tx = unsafe { mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx) };
+ let helper = cx.jobserver
+ .clone()
+ .into_helper_thread(move |token| {
+ drop(tx.send(Message::Token(token)));
+ })
+ .chain_err(|| "failed to create helper thread for jobserver management")?;
+
+ crossbeam::scope(|scope| self.drain_the_queue(cx, scope, &helper))
}
- fn drain_the_queue(&mut self,
- cx: &mut Context,
- scope: &Scope<'a>,
- jobserver_helper: &HelperThread)
- -> CargoResult<()> {
+ fn drain_the_queue(
+ &mut self,
+ cx: &mut Context,
+ scope: &Scope<'a>,
+ jobserver_helper: &HelperThread,
+ ) -> CargoResult<()> {
use std::time::Instant;
let mut tokens = Vec::new();
// start requesting job tokens. Each job after the first needs to
// request a token.
while let Some((fresh, key, jobs)) = self.queue.dequeue() {
- let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| {
- f.combine(fresh)
- });
- self.pending.insert(key, PendingBuild {
- amt: jobs.len(),
- fresh: total_fresh,
- });
+ let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| f.combine(fresh));
+ self.pending.insert(
+ key,
+ PendingBuild {
+ amt: jobs.len(),
+ fresh: total_fresh,
+ },
+ );
for (job, f) in jobs {
queue.push((key, job, f.combine(fresh)));
if self.active + queue.len() > 0 {
// If after all that we're not actually running anything then we're
// done!
if self.active == 0 {
- break
+ break;
}
// And finally, before we block waiting for the next event, drop any
error = Some(format_err!("build failed"));
handle_error(e, &mut *cx.config.shell());
cx.config.shell().warn(
- "build failed, waiting for other \
- jobs to finish...")?;
+ "build failed, waiting for other \
+ jobs to finish...",
+ )?;
} else {
error = Some(e);
}
}
}
Message::Token(acquired_token) => {
- tokens.push(acquired_token.chain_err(|| {
- "failed to acquire jobserver token"
- })?);
+ tokens.push(acquired_token.chain_err(|| "failed to acquire jobserver token")?);
}
}
}
let build_type = if self.is_release { "release" } else { "dev" };
let profile = cx.lib_profile();
- let mut opt_type = String::from(if profile.opt_level == "0" { "unoptimized" }
- else { "optimized" });
+ let mut opt_type = String::from(if profile.opt_level == "0" {
+ "unoptimized"
+ } else {
+ "optimized"
+ });
if profile.debuginfo.is_some() {
opt_type += " + debuginfo";
}
let duration = start_time.elapsed();
- let time_elapsed = format!("{}.{1:.2} secs",
- duration.as_secs(),
- duration.subsec_nanos() / 10_000_000);
+ let time_elapsed = format!(
+ "{}.{1:.2} secs",
+ duration.as_secs(),
+ duration.subsec_nanos() / 10_000_000
+ );
if self.queue.is_empty() {
- let message = format!("{} [{}] target(s) in {}",
- build_type,
- opt_type,
- time_elapsed);
+ let message = format!(
+ "{} [{}] target(s) in {}",
+ build_type, opt_type, time_elapsed
+ );
cx.config.shell().status("Finished", message)?;
Ok(())
} else if let Some(e) = error {
/// Executes a job in the `scope` given, pushing the spawned thread's
/// handle onto `threads`.
- fn run(&mut self,
- key: Key<'a>,
- fresh: Freshness,
- job: Job,
- config: &Config,
- scope: &Scope<'a>) -> CargoResult<()> {
+ fn run(
+ &mut self,
+ key: Key<'a>,
+ fresh: Freshness,
+ job: Job,
+ config: &Config,
+ scope: &Scope<'a>,
+ ) -> CargoResult<()> {
info!("start: {:?}", key);
self.active += 1;
let my_tx = self.tx.clone();
let doit = move || {
- let res = job.run(fresh, &JobState {
- tx: my_tx.clone(),
- });
+ let res = job.run(fresh, &JobState { tx: my_tx.clone() });
my_tx.send(Message::Finish(key, res)).unwrap();
};
match fresh {
Freshness::Fresh => doit(),
- Freshness::Dirty => { scope.spawn(doit); }
+ Freshness::Dirty => {
+ scope.spawn(doit);
+ }
}
// Print out some nice progress information
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
- fn note_working_on(&mut self,
- config: &Config,
- key: &Key<'a>,
- fresh: Freshness) -> CargoResult<()> {
- if (self.compiled.contains(key.pkg) && !key.profile.doc) ||
- (self.documented.contains(key.pkg) && key.profile.doc) {
- return Ok(())
+ fn note_working_on(
+ &mut self,
+ config: &Config,
+ key: &Key<'a>,
+ fresh: Freshness,
+ ) -> CargoResult<()> {
+ if (self.compiled.contains(key.pkg) && !key.profile.doc)
+ || (self.documented.contains(key.pkg) && key.profile.doc)
+ {
+ return Ok(());
}
match fresh {
}
}
- fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>)
- -> CargoResult<Vec<Key<'a>>> {
+ fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
let unit = Unit {
pkg: cx.get_package(self.pkg)?,
target: self.target,
kind: self.kind,
};
let targets = cx.dep_targets(&unit)?;
- Ok(targets.iter().filter_map(|unit| {
- // Binaries aren't actually needed to *compile* tests, just to run
- // them, so we don't include this dependency edge in the job graph.
- if self.target.is_test() && unit.target.is_bin() {
- None
- } else {
- Some(Key::new(unit))
- }
- }).collect())
+ Ok(targets
+ .iter()
+ .filter_map(|unit| {
+ // Binaries aren't actually needed to *compile* tests, just to run
+ // them, so we don't include this dependency edge in the job graph.
+ if self.target.is_test() && unit.target.is_bin() {
+ None
+ } else {
+ Some(Key::new(unit))
+ }
+ })
+ .collect())
}
}
impl<'a> fmt::Debug for Key<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{} => {}/{} => {:?}", self.pkg, self.target, self.profile,
- self.kind)
+ write!(
+ f,
+ "{} => {}/{} => {:?}",
+ self.pkg, self.target, self.profile, self.kind
+ )
}
}
use std::fs;
use std::io;
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
use core::Workspace;
-use util::{Config, FileLock, CargoResult, Filesystem};
+use util::{CargoResult, Config, FileLock, Filesystem};
/// Contains the paths of all target output locations.
///
///
/// Differs from `at` in that this calculates the root path from the workspace target directory,
/// adding the target triple and the profile (debug, release, ...).
- pub fn new(ws: &Workspace,
- triple: Option<&str>,
- dest: &str) -> CargoResult<Layout> {
+ pub fn new(ws: &Workspace, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
let mut path = ws.target_dir();
// Flexible target specifications often point at filenames, so interpret
// the target triple as a Path and then just use the file stem as the
// component for the directory name.
if let Some(triple) = triple {
- path.push(Path::new(triple).file_stem().ok_or_else(|| {
- format_err!("target was empty")
- })?);
+ path.push(Path::new(triple)
+ .file_stem()
+ .ok_or_else(|| format_err!("target was empty"))?);
}
path.push(dest);
Layout::at(ws.config(), path)
/// This is recommended to prevent derived/temporary files from bloating backups.
fn exclude_from_backups(&self, path: &Path) {
use std::ptr;
- use core_foundation::{url, number, string};
+ use core_foundation::{number, string, url};
use core_foundation::base::TCFType;
// For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
}
/// Fetch the root path.
- pub fn dest(&self) -> &Path { &self.root }
+ pub fn dest(&self) -> &Path {
+ &self.root
+ }
/// Fetch the deps path.
- pub fn deps(&self) -> &Path { &self.deps }
+ pub fn deps(&self) -> &Path {
+ &self.deps
+ }
/// Fetch the examples path.
- pub fn examples(&self) -> &Path { &self.examples }
+ pub fn examples(&self) -> &Path {
+ &self.examples
+ }
/// Fetch the root path.
- pub fn root(&self) -> &Path { &self.root }
+ pub fn root(&self) -> &Path {
+ &self.root
+ }
/// Fetch the incremental path.
- pub fn incremental(&self) -> &Path { &self.incremental }
+ pub fn incremental(&self) -> &Path {
+ &self.incremental
+ }
/// Fetch the fingerprint path.
- pub fn fingerprint(&self) -> &Path { &self.fingerprint }
+ pub fn fingerprint(&self) -> &Path {
+ &self.fingerprint
+ }
/// Fetch the build path.
- pub fn build(&self) -> &Path { &self.build }
+ pub fn build(&self) -> &Path {
+ &self.build
+ }
}
use std::collections::{HashMap, HashSet};
use std::fmt::Write;
-use core::{Resolve, PackageId};
+use core::{PackageId, Resolve};
use util::CargoResult;
use super::Unit;
pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> {
if !self.validated.insert(unit.pkg.package_id()) {
- return Ok(())
+ return Ok(());
}
let lib = match unit.pkg.manifest().links() {
Some(lib) => lib,
let dep_path = resolve.path_to_top(pkgid);
let mut dep_path_desc = format!("package `{}`", dep_path[0]);
for dep in dep_path.iter().skip(1) {
- write!(dep_path_desc,
- "\n ... which is depended on by `{}`",
- dep).unwrap();
+ write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
}
dep_path_desc
};
- bail!("multiple packages link to native library `{}`, \
- but a native library can be linked only once\n\
- \n\
- {}\nlinks to native library `{}`\n\
- \n\
- {}\nalso links to native library `{}`",
- lib,
- describe_path(prev), lib,
- describe_path(pkg), lib)
+ bail!(
+ "multiple packages link to native library `{}`, \
+ but a native library can be linked only once\n\
+ \n\
+ {}\nlinks to native library `{}`\n\
+ \n\
+ {}\nalso links to native library `{}`",
+ lib,
+ describe_path(prev),
+ lib,
+ describe_path(pkg),
+ lib
+ )
}
- if !unit.pkg.manifest().targets().iter().any(|t| t.is_custom_build()) {
- bail!("package `{}` specifies that it links to `{}` but does not \
- have a custom build script", unit.pkg.package_id(), lib)
+ if !unit.pkg
+ .manifest()
+ .targets()
+ .iter()
+ .any(|t| t.is_custom_build())
+ {
+ bail!(
+ "package `{}` specifies that it links to `{}` but does not \
+ have a custom build script",
+ unit.pkg.package_id(),
+ lib
+ )
}
self.links.insert(lib.to_string(), unit.pkg.package_id());
Ok(())
use same_file::is_same_file;
use serde_json;
-use core::{Feature, Package, PackageId, PackageSet, Target, Resolve};
+use core::{Feature, Package, PackageId, PackageSet, Resolve, Target};
use core::{Profile, Profiles, Workspace};
use core::manifest::Lto;
use core::shell::ColorChoice;
-use util::{self, ProcessBuilder, machine_message};
-use util::{Config, internal, profile, join_paths};
+use util::{self, machine_message, ProcessBuilder};
+use util::{internal, join_paths, profile, Config};
use util::paths;
use util::errors::{CargoResult, CargoResultExt, Internal};
use util::Freshness;
use self::output_depinfo::output_depinfo;
pub use self::compilation::Compilation;
-pub use self::context::{Context, Unit, TargetFileType};
-pub use self::custom_build::{BuildOutput, BuildMap, BuildScripts};
+pub use self::context::{Context, TargetFileType, Unit};
+pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts};
pub use self::layout::is_bad_artifact_name;
mod compilation;
///
/// These will be the same unless cross-compiling.
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)]
-pub enum Kind { Host, Target }
+pub enum Kind {
+ Host,
+ Target,
+}
/// Configuration information for a rustc build.
#[derive(Default, Clone)]
/// In case of an `Err`, Cargo will not continue with the build process for
/// this package.
- fn exec(&self,
- cmd: ProcessBuilder,
- _id: &PackageId,
- _target: &Target)
- -> CargoResult<()> {
+ fn exec(&self, cmd: ProcessBuilder, _id: &PackageId, _target: &Target) -> CargoResult<()> {
cmd.exec()?;
Ok(())
}
- fn exec_json(&self,
- cmd: ProcessBuilder,
- _id: &PackageId,
- _target: &Target,
- handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
- handle_stderr: &mut FnMut(&str) -> CargoResult<()>)
- -> CargoResult<()> {
+ fn exec_json(
+ &self,
+ cmd: ProcessBuilder,
+ _id: &PackageId,
+ _target: &Target,
+ handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
+ handle_stderr: &mut FnMut(&str) -> CargoResult<()>,
+ ) -> CargoResult<()> {
cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?;
Ok(())
}
// Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located.
-pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
- pkg_targets: &'a PackagesToBuild<'a>,
- packages: &'a PackageSet<'cfg>,
- resolve: &'a Resolve,
- config: &'cfg Config,
- build_config: BuildConfig,
- profiles: &'a Profiles,
- exec: &Arc<Executor>)
- -> CargoResult<Compilation<'cfg>> {
- let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| {
- let default_kind = if build_config.requested_target.is_some() {
- Kind::Target
- } else {
- Kind::Host
- };
- targets.iter().map(move |&(target, profile)| {
- Unit {
+pub fn compile_targets<'a, 'cfg: 'a>(
+ ws: &Workspace<'cfg>,
+ pkg_targets: &'a PackagesToBuild<'a>,
+ packages: &'a PackageSet<'cfg>,
+ resolve: &'a Resolve,
+ config: &'cfg Config,
+ build_config: BuildConfig,
+ profiles: &'a Profiles,
+ exec: &Arc<Executor>,
+) -> CargoResult<Compilation<'cfg>> {
+ let units = pkg_targets
+ .iter()
+ .flat_map(|&(pkg, ref targets)| {
+ let default_kind = if build_config.requested_target.is_some() {
+ Kind::Target
+ } else {
+ Kind::Host
+ };
+ targets.iter().map(move |&(target, profile)| Unit {
pkg,
target,
profile,
- kind: if target.for_host() {Kind::Host} else {default_kind},
- }
+ kind: if target.for_host() {
+ Kind::Host
+ } else {
+ default_kind
+ },
+ })
})
- }).collect::<Vec<_>>();
+ .collect::<Vec<_>>();
- let mut cx = Context::new(ws, resolve, packages, config,
- build_config, profiles)?;
+ let mut cx = Context::new(ws, resolve, packages, config, build_config, profiles)?;
let mut queue = JobQueue::new(&cx);
};
if unit.profile.test {
- cx.compilation.tests.push((unit.pkg.clone(),
- unit.target.kind().clone(),
- unit.target.name().to_string(),
- dst.clone()));
+ cx.compilation.tests.push((
+ unit.pkg.clone(),
+ unit.target.kind().clone(),
+ unit.target.name().to_string(),
+ dst.clone(),
+ ));
} else if unit.target.is_bin() || unit.target.is_example() {
cx.compilation.binaries.push(bindst.clone());
} else if unit.target.is_lib() {
let pkgid = unit.pkg.package_id().clone();
- cx.compilation.libraries.entry(pkgid).or_insert_with(HashSet::new)
- .insert((unit.target.clone(), dst.clone()));
+ cx.compilation
+ .libraries
+ .entry(pkgid)
+ .or_insert_with(HashSet::new)
+ .insert((unit.target.clone(), dst.clone()));
}
}
for dep in cx.dep_targets(unit)?.iter() {
- if !unit.target.is_lib() { continue }
+ if !unit.target.is_lib() {
+ continue;
+ }
if dep.profile.run_custom_build {
let out_dir = cx.build_script_out_dir(dep).display().to_string();
- cx.compilation.extra_env.entry(dep.pkg.package_id().clone())
- .or_insert_with(Vec::new)
- .push(("OUT_DIR".to_string(), out_dir));
+ cx.compilation
+ .extra_env
+ .entry(dep.pkg.package_id().clone())
+ .or_insert_with(Vec::new)
+ .push(("OUT_DIR".to_string(), out_dir));
}
- if !dep.target.is_lib() { continue }
- if dep.profile.doc { continue }
+ if !dep.target.is_lib() {
+ continue;
+ }
+ if dep.profile.doc {
+ continue;
+ }
let v = cx.target_filenames(dep)?;
- cx.compilation.libraries
+ cx.compilation
+ .libraries
.entry(unit.pkg.package_id().clone())
.or_insert_with(HashSet::new)
- .extend(v.iter().map(|&(ref f, _, _)| {
- (dep.target.clone(), f.clone())
- }));
+ .extend(
+ v.iter()
+ .map(|&(ref f, _, _)| (dep.target.clone(), f.clone())),
+ );
}
let feats = cx.resolve.features(unit.pkg.package_id());
if !feats.is_empty() {
- cx.compilation.cfgs.entry(unit.pkg.package_id().clone()).or_insert_with(|| {
- feats.iter().map(|feat| format!("feature=\"{}\"", feat)).collect()
- });
+ cx.compilation
+ .cfgs
+ .entry(unit.pkg.package_id().clone())
+ .or_insert_with(|| {
+ feats
+ .iter()
+ .map(|feat| format!("feature=\"{}\"", feat))
+ .collect()
+ });
}
let rustdocflags = cx.rustdocflags_args(unit)?;
if !rustdocflags.is_empty() {
- cx.compilation.rustdocflags.entry(unit.pkg.package_id().clone())
+ cx.compilation
+ .rustdocflags
+ .entry(unit.pkg.package_id().clone())
.or_insert(rustdocflags);
}
}
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
- cx.compilation.cfgs.entry(pkg.clone())
+ cx.compilation
+ .cfgs
+ .entry(pkg.clone())
.or_insert_with(HashSet::new)
.extend(output.cfgs.iter().cloned());
- cx.compilation.extra_env.entry(pkg.clone())
+ cx.compilation
+ .extra_env
+ .entry(pkg.clone())
.or_insert_with(Vec::new)
.extend(output.env.iter().cloned());
Ok(cx.compilation)
}
-fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
- jobs: &mut JobQueue<'a>,
- unit: &Unit<'a>,
- exec: &Arc<Executor>) -> CargoResult<()> {
+fn compile<'a, 'cfg: 'a>(
+ cx: &mut Context<'a, 'cfg>,
+ jobs: &mut JobQueue<'a>,
+ unit: &Unit<'a>,
+ exec: &Arc<Executor>,
+) -> CargoResult<()> {
if !cx.compiled.insert(*unit) {
- return Ok(())
+ return Ok(());
}
// Build up the work to be done to compile this unit, enqueuing it once
// we've got everything constructed.
- let p = profile::start(format!("preparing: {}/{}", unit.pkg,
- unit.target.name()));
+ let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
fingerprint::prepare_init(cx, unit)?;
cx.links.validate(cx.resolve, unit)?;
Ok(())
}
-fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- unit: &Unit<'a>,
- exec: &Arc<Executor>) -> CargoResult<Work> {
+fn rustc<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+ exec: &Arc<Executor>,
+) -> CargoResult<Work> {
let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
let name = unit.pkg.name().to_string();
// If we are a binary and the package also contains a library, then we
// don't pass the `-l` flags.
- let pass_l_flag = unit.target.is_lib() ||
- !unit.pkg.targets().iter().any(|t| t.is_lib());
+ let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());
let do_rename = unit.target.allows_underscores() && !unit.profile.test;
let real_name = unit.target.name().to_string();
let crate_name = unit.target.crate_name();
let root_output = cx.target_root().to_path_buf();
let pkg_root = unit.pkg.root().to_path_buf();
- let cwd = rustc.get_cwd().unwrap_or_else(|| cx.config.cwd()).to_path_buf();
+ let cwd = rustc
+ .get_cwd()
+ .unwrap_or_else(|| cx.config.cwd())
+ .to_path_buf();
return Ok(Work::new(move |state| {
// Only at runtime have we discovered what the extra -L and -l
// previous build scripts, we include them in the rustc invocation.
if let Some(build_deps) = build_deps {
let build_state = build_state.outputs.lock().unwrap();
- add_native_deps(&mut rustc, &build_state, &build_deps,
-                        pass_l_flag, &current_id)?;
- add_plugin_deps(&mut rustc, &build_state, &build_deps,
- &root_output)?;
+ add_native_deps(
+ &mut rustc,
+ &build_state,
+ &build_deps,
+ pass_l_flag,
+                &current_id,
+ )?;
+ add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
            add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
}
state.running(&rustc);
if json_messages {
- exec.exec_json(rustc, &package_id, &target,
- &mut |line| if !line.is_empty() {
- Err(internal(&format!("compiler stdout is not empty: `{}`", line)))
- } else {
- Ok(())
+ exec.exec_json(
+ rustc,
+ &package_id,
+ &target,
+ &mut |line| {
+ if !line.is_empty() {
+ Err(internal(&format!(
+ "compiler stdout is not empty: `{}`",
+ line
+ )))
+ } else {
+ Ok(())
+ }
},
&mut |line| {
// stderr from rustc can have a mix of JSON and non-JSON output
writeln!(io::stderr(), "{}", line)?;
}
Ok(())
- }
- ).chain_err(|| {
- format!("Could not compile `{}`.", name)
- })?;
+ },
+ ).chain_err(|| format!("Could not compile `{}`.", name))?;
} else {
exec.exec(rustc, &package_id, &target)
.map_err(Internal::new)
- .chain_err(|| {
- format!("Could not compile `{}`.", name)
- })?;
+ .chain_err(|| format!("Could not compile `{}`.", name))?;
}
if do_rename && real_name != crate_name {
let dst = &filenames[0].0;
- let src = dst.with_file_name(dst.file_name().unwrap()
- .to_str().unwrap()
- .replace(&real_name, &crate_name));
+ let src = dst.with_file_name(
+ dst.file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .replace(&real_name, &crate_name),
+ );
if src.exists() && src.file_name() != dst.file_name() {
- fs::rename(&src, &dst).chain_err(|| {
- internal(format!("could not rename crate {:?}", src))
- })?;
+ fs::rename(&src, &dst)
+ .chain_err(|| internal(format!("could not rename crate {:?}", src)))?;
}
}
if rustc_dep_info_loc.exists() {
- fingerprint::translate_dep_info(&rustc_dep_info_loc,
- &dep_info_loc,
- &pkg_root,
- &cwd)
+ fingerprint::translate_dep_info(&rustc_dep_info_loc, &dep_info_loc, &pkg_root, &cwd)
.chain_err(|| {
- internal(format!("could not parse/generate dep info at: {}",
- rustc_dep_info_loc.display()))
+ internal(format!(
+ "could not parse/generate dep info at: {}",
+ rustc_dep_info_loc.display()
+ ))
})?;
}
// Add all relevant -L and -l flags from dependencies (now calculated and
// present in `state`) to the command provided
- fn add_native_deps(rustc: &mut ProcessBuilder,
- build_state: &BuildMap,
- build_scripts: &BuildScripts,
- pass_l_flag: bool,
- current_id: &PackageId) -> CargoResult<()> {
+ fn add_native_deps(
+ rustc: &mut ProcessBuilder,
+ build_state: &BuildMap,
+ build_scripts: &BuildScripts,
+ pass_l_flag: bool,
+ current_id: &PackageId,
+ ) -> CargoResult<()> {
for key in build_scripts.to_link.iter() {
let output = build_state.get(key).ok_or_else(|| {
- internal(format!("couldn't find build state for {}/{:?}",
- key.0, key.1))
+ internal(format!(
+ "couldn't find build state for {}/{:?}",
+ key.0, key.1
+ ))
})?;
for path in output.library_paths.iter() {
rustc.arg("-L").arg(path);
// Add all custom environment variables present in `state` (after they've
// been put there by one of the `build_scripts`) to the command provided.
- fn add_custom_env(rustc: &mut ProcessBuilder,
- build_state: &BuildMap,
- current_id: &PackageId,
- kind: Kind) -> CargoResult<()> {
+ fn add_custom_env(
+ rustc: &mut ProcessBuilder,
+ build_state: &BuildMap,
+ current_id: &PackageId,
+ kind: Kind,
+ ) -> CargoResult<()> {
let key = (current_id.clone(), kind);
if let Some(output) = build_state.get(&key) {
for &(ref name, ref value) in output.env.iter() {
/// Link the compiled target (often of form `foo-{metadata_hash}`) to the
/// final target. This must happen during both "Fresh" and "Compile"
-fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- unit: &Unit<'a>,
- fresh: bool) -> CargoResult<Work> {
+fn link_targets<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+ fresh: bool,
+) -> CargoResult<Work> {
let filenames = cx.target_filenames(unit)?;
let package_id = unit.pkg.package_id().clone();
let target = unit.target.clone();
let profile = unit.profile.clone();
- let features = cx.resolve.features_sorted(&package_id).into_iter()
+ let features = cx.resolve
+ .features_sorted(&package_id)
+ .into_iter()
.map(|s| s.to_owned())
.collect();
let json_messages = cx.build_config.json_messages;
// This may have been a `cargo rustc` command which changes the
// output, so the source may not actually exist.
if !src.exists() {
- continue
+ continue;
}
let dst = match link_dst.as_ref() {
Some(dst) => dst,
debug!("linking {} to {}", src.display(), dst.display());
if is_same_file(src, dst).unwrap_or(false) {
- continue
+ continue;
}
if dst.exists() {
paths::remove_file(&dst)?;
fs::copy(src, dst).map(|_| ())
})
.chain_err(|| {
- format!("failed to link or copy `{}` to `{}`",
- src.display(), dst.display())
+ format!(
+ "failed to link or copy `{}` to `{}`",
+ src.display(),
+ dst.display()
+ )
})?;
}
// For all plugin dependencies, add their -L paths (now calculated and
// present in `state`) to the dynamic library load path for the command to
// execute.
-fn add_plugin_deps(rustc: &mut ProcessBuilder,
- build_state: &BuildMap,
- build_scripts: &BuildScripts,
- root_output: &PathBuf)
- -> CargoResult<()> {
+fn add_plugin_deps(
+ rustc: &mut ProcessBuilder,
+ build_state: &BuildMap,
+ build_scripts: &BuildScripts,
+ root_output: &PathBuf,
+) -> CargoResult<()> {
let var = util::dylib_path_envvar();
let search_path = rustc.get_env(var).unwrap_or_default();
let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
for id in build_scripts.plugins.iter() {
let key = (id.clone(), Kind::Host);
- let output = build_state.get(&key).ok_or_else(|| {
- internal(format!("couldn't find libs for plugin dep {}", id))
- })?;
- search_path.append(&mut filter_dynamic_search_path(output.library_paths.iter(),
- root_output));
+ let output = build_state
+ .get(&key)
+ .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
+ search_path.append(&mut filter_dynamic_search_path(
+ output.library_paths.iter(),
+ root_output,
+ ));
}
let search_path = join_paths(&search_path, var)?;
rustc.env(var, &search_path);
// Strip off prefixes like "native=" or "framework=" and filter out directories
// *not* inside our output directory since they are likely spurious and can cause
// clashes with system shared libraries (issue #3366).
-fn filter_dynamic_search_path<'a, I>(paths :I, root_output: &PathBuf) -> Vec<PathBuf>
- where I: Iterator<Item=&'a PathBuf> {
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+where
+ I: Iterator<Item = &'a PathBuf>,
+{
let mut search_path = vec![];
for dir in paths {
let dir = match dir.to_str() {
Some(s) => {
let mut parts = s.splitn(2, '=');
match (parts.next(), parts.next()) {
- (Some("native"), Some(path)) |
- (Some("crate"), Some(path)) |
- (Some("dependency"), Some(path)) |
- (Some("framework"), Some(path)) |
- (Some("all"), Some(path)) => path.into(),
+ (Some("native"), Some(path))
+ | (Some("crate"), Some(path))
+ | (Some("dependency"), Some(path))
+ | (Some("framework"), Some(path))
+ | (Some("all"), Some(path)) => path.into(),
_ => dir.clone(),
}
}
if dir.starts_with(&root_output) {
search_path.push(dir);
} else {
- debug!("Not including path {} in runtime library search path because it is \
- outside target root {}", dir.display(), root_output.display());
+ debug!(
+ "Not including path {} in runtime library search path because it is \
+ outside target root {}",
+ dir.display(),
+ root_output.display()
+ );
}
}
search_path
}
-fn prepare_rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- crate_types: &[&str],
- unit: &Unit<'a>) -> CargoResult<ProcessBuilder> {
+fn prepare_rustc<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ crate_types: &[&str],
+ unit: &Unit<'a>,
+) -> CargoResult<ProcessBuilder> {
let mut base = cx.compilation.rustc_process(unit.pkg)?;
base.inherit_jobserver(&cx.jobserver);
build_base_args(cx, &mut base, unit, crate_types)?;
Ok(base)
}
-
-fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- unit: &Unit<'a>) -> CargoResult<Work> {
+fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?;
rustdoc.inherit_jobserver(&cx.jobserver);
rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
}
}
state.running(&rustdoc);
- rustdoc.exec().chain_err(|| format!("Could not document `{}`.", name))?;
+ rustdoc
+ .exec()
+ .chain_err(|| format!("Could not document `{}`.", name))?;
Ok(())
}))
}
cmd.cwd(cwd);
}
-fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- cmd: &mut ProcessBuilder,
- unit: &Unit<'a>,
- crate_types: &[&str]) -> CargoResult<()> {
+fn build_base_args<'a, 'cfg>(
+ cx: &mut Context<'a, 'cfg>,
+ cmd: &mut ProcessBuilder,
+ unit: &Unit<'a>,
+ crate_types: &[&str],
+) -> CargoResult<()> {
let Profile {
- ref opt_level, ref lto, codegen_units, ref rustc_args, debuginfo,
- debug_assertions, overflow_checks, rpath, test, doc: _doc,
- run_custom_build, ref panic, check, ..
+ ref opt_level,
+ ref lto,
+ codegen_units,
+ ref rustc_args,
+ debuginfo,
+ debug_assertions,
+ overflow_checks,
+ rpath,
+ test,
+ doc: _doc,
+ run_custom_build,
+ ref panic,
+ check,
+ ..
} = *unit.profile;
assert!(!run_custom_build);
add_path_args(cx, unit, cmd);
match cx.config.shell().color_choice() {
- ColorChoice::Always => { cmd.arg("--color").arg("always"); }
- ColorChoice::Never => { cmd.arg("--color").arg("never"); }
+ ColorChoice::Always => {
+ cmd.arg("--color").arg("always");
+ }
+ ColorChoice::Never => {
+ cmd.arg("--color").arg("never");
+ }
ColorChoice::CargoAuto => {}
}
cmd.arg("--emit=dep-info,link");
}
- let prefer_dynamic = (unit.target.for_host() &&
- !unit.target.is_custom_build()) ||
- (crate_types.contains(&"dylib") &&
- cx.ws.members().any(|p| p != unit.pkg));
+ let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
+ || (crate_types.contains(&"dylib") && cx.ws.members().any(|p| p != unit.pkg));
if prefer_dynamic {
cmd.arg("-C").arg("prefer-dynamic");
}
cmd.arg("-C").arg(&format!("extra-filename=-{}", m));
}
None => {
- cmd.arg("-C").arg(&format!("metadata={}", cx.target_short_hash(unit)));
+ cmd.arg("-C")
+ .arg(&format!("metadata={}", cx.target_short_hash(unit)));
}
}
cmd.arg("--out-dir").arg(&cx.out_dir(unit));
- fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str,
- val: Option<&OsStr>) {
+ fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
if let Some(val) = val {
let mut joined = OsString::from(prefix);
joined.push(val);
}
if unit.kind == Kind::Target {
- opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref()));
+ opt(
+ cmd,
+ "--target",
+ "",
+ cx.requested_target().map(|s| s.as_ref()),
+ );
}
opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref()));
- opt(cmd, "-C", "linker=", cx.linker(unit.kind).map(|s| s.as_ref()));
+ opt(
+ cmd,
+ "-C",
+ "linker=",
+ cx.linker(unit.kind).map(|s| s.as_ref()),
+ );
cmd.args(&cx.incremental_args(unit)?);
Ok(())
}
-
-fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder,
- cx: &mut Context<'a, 'cfg>,
- unit: &Unit<'a>) -> CargoResult<()> {
+fn build_deps_args<'a, 'cfg>(
+ cmd: &mut ProcessBuilder,
+ cx: &mut Context<'a, 'cfg>,
+ unit: &Unit<'a>,
+) -> CargoResult<()> {
cmd.arg("-L").arg(&{
let mut deps = OsString::from("dependency=");
deps.push(cx.deps_dir(unit));
// If there is not one linkable target but should, rustc fails later
// on if there is an `extern crate` for it. This may turn into a hard
// error in the future, see PR #4797
- if !dep_targets.iter().any(|u| !u.profile.doc && u.target.linkable()) {
- if let Some(u) = dep_targets.iter()
- .find(|u| !u.profile.doc && u.target.is_lib()) {
- cx.config.shell().warn(format!("The package `{}` \
-provides no linkable target. The compiler might raise an error while compiling \
-`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
-Cargo.toml. This warning might turn into a hard error in the future.",
- u.target.crate_name(),
- unit.target.crate_name(),
- u.target.crate_name()))?;
- }
+ if !dep_targets
+ .iter()
+ .any(|u| !u.profile.doc && u.target.linkable())
+ {
+ if let Some(u) = dep_targets
+ .iter()
+ .find(|u| !u.profile.doc && u.target.is_lib())
+ {
+ cx.config.shell().warn(format!(
+ "The package `{}` \
+ provides no linkable target. The compiler might raise an error while compiling \
+ `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
+ Cargo.toml. This warning might turn into a hard error in the future.",
+ u.target.crate_name(),
+ unit.target.crate_name(),
+ u.target.crate_name()
+ ))?;
+ }
}
for dep in dep_targets {
return Ok(());
- fn link_to<'a, 'cfg>(cmd: &mut ProcessBuilder,
- cx: &mut Context<'a, 'cfg>,
- current: &Unit<'a>,
- dep: &Unit<'a>) -> CargoResult<()> {
+ fn link_to<'a, 'cfg>(
+ cmd: &mut ProcessBuilder,
+ cx: &mut Context<'a, 'cfg>,
+ current: &Unit<'a>,
+ dep: &Unit<'a>,
+ ) -> CargoResult<()> {
for &(ref dst, _, file_type) in cx.target_filenames(dep)?.iter() {
if file_type != TargetFileType::Linkable {
- continue
+ continue;
}
let mut v = OsString::new();
//
// This I believe mostly works out for now, but we'll likely want
// to tighten up this in the future.
- let name = current.pkg.dependencies()
+ let name = current
+ .pkg
+ .dependencies()
.iter()
.filter(|d| d.matches_ignoring_source(dep.pkg.summary()))
.filter_map(|d| d.rename())
fn envify(s: &str) -> String {
s.chars()
- .flat_map(|c| c.to_uppercase())
- .map(|c| if c == '-' {'_'} else {c})
- .collect()
+ .flat_map(|c| c.to_uppercase())
+ .map(|c| if c == '-' { '_' } else { c })
+ .collect()
}
impl Kind {
-use std::collections::{HashSet, BTreeSet};
-use std::io::{Write, BufWriter};
+use std::collections::{BTreeSet, HashSet};
+use std::io::{BufWriter, Write};
use std::fs::File;
use std::path::{Path, PathBuf};
use ops::{Context, Unit};
-use util::{CargoResult, internal};
+use util::{internal, CargoResult};
use util::paths;
use ops::cargo_rustc::fingerprint;
Some(base) => match path.strip_prefix(base) {
Ok(relpath) => relpath,
_ => path,
- }
+ },
};
- relpath.to_str().ok_or_else(|| internal("path not utf-8")).map(|f| f.replace(" ", "\\ "))
+ relpath
+ .to_str()
+ .ok_or_else(|| internal("path not utf-8"))
+ .map(|f| f.replace(" ", "\\ "))
}
fn add_deps_for_unit<'a, 'b>(
context: &mut Context<'a, 'b>,
unit: &Unit<'a>,
visited: &mut HashSet<Unit<'a>>,
-)
- -> CargoResult<()>
-{
+) -> CargoResult<()> {
if !visited.insert(*unit) {
return Ok(());
}
deps.insert(path);
}
} else {
- debug!("can't find dep_info for {:?} {:?}",
- unit.pkg.package_id(), unit.profile);
+ debug!(
+ "can't find dep_info for {:?} {:?}",
+ unit.pkg.package_id(),
+ unit.profile
+ );
return Err(internal("dep_info missing"));
}
}
let basedir_string;
let basedir = match context.config.get_path("build.dep-info-basedir")? {
Some(value) => {
- basedir_string = value.val.as_os_str().to_str().
- ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?.to_string();
+ basedir_string = value
+ .val
+ .as_os_str()
+ .to_str()
+ .ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?
+ .to_string();
Some(basedir_string.as_str())
}
None => None,
// If nothing changed don't recreate the file which could alter
// its mtime
if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) {
- if previous.len() == 1 &&
- previous[0].0 == target_fn &&
- previous[0].1 == deps {
- continue
+ if previous.len() == 1 && previous[0].0 == target_fn && previous[0].1 == deps {
+ continue;
}
}
-use std::ffi::{OsString, OsStr};
+use std::ffi::{OsStr, OsString};
use ops::{self, Compilation};
-use util::{self, CargoTestError, Test, ProcessError};
+use util::{self, CargoTestError, ProcessError, Test};
use util::errors::CargoResult;
use core::Workspace;
pub only_doc: bool,
}
-pub fn run_tests(ws: &Workspace,
- options: &TestOptions,
- test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
+pub fn run_tests(
+ ws: &Workspace,
+ options: &TestOptions,
+ test_args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
let compilation = compile_tests(ws, options)?;
if options.no_run {
- return Ok(None)
+ return Ok(None);
}
let (test, mut errors) = if options.only_doc {
assert!(options.compile_opts.filter.is_specific());
// If we have an error and want to fail fast, return
if !errors.is_empty() && !options.no_fail_fast {
- return Ok(Some(CargoTestError::new(test, errors)))
+ return Ok(Some(CargoTestError::new(test, errors)));
}
// If a specific test was requested or we're not running any tests at all,
if options.compile_opts.filter.is_specific() {
match errors.len() {
0 => return Ok(None),
- _ => return Ok(Some(CargoTestError::new(test, errors)))
+ _ => return Ok(Some(CargoTestError::new(test, errors))),
}
}
}
}
-pub fn run_benches(ws: &Workspace,
- options: &TestOptions,
- args: &[String]) -> CargoResult<Option<CargoTestError>> {
+pub fn run_benches(
+ ws: &Workspace,
+ options: &TestOptions,
+ args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
let mut args = args.to_vec();
args.push("--bench".to_string());
let compilation = compile_tests(ws, options)?;
if options.no_run {
- return Ok(None)
+ return Ok(None);
}
let (test, errors) = run_unit_tests(options, &args, &compilation)?;
match errors.len() {
}
}
-fn compile_tests<'a>(ws: &Workspace<'a>,
- options: &TestOptions<'a>)
- -> CargoResult<Compilation<'a>> {
+fn compile_tests<'a>(
+ ws: &Workspace<'a>,
+ options: &TestOptions<'a>,
+) -> CargoResult<Compilation<'a>> {
let mut compilation = ops::compile(ws, &options.compile_opts)?;
- compilation.tests.sort_by(|a, b| {
- (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2))
- });
+ compilation
+ .tests
+ .sort_by(|a, b| (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2)));
Ok(compilation)
}
/// Run the unit and integration tests of a project.
-fn run_unit_tests(options: &TestOptions,
- test_args: &[String],
- compilation: &Compilation)
- -> CargoResult<(Test, Vec<ProcessError>)> {
+fn run_unit_tests(
+ options: &TestOptions,
+ test_args: &[String],
+ compilation: &Compilation,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
let config = options.compile_opts.config;
let cwd = options.compile_opts.config.cwd();
};
let mut cmd = compilation.target_process(exe, pkg)?;
cmd.args(test_args);
- config.shell().concise(|shell| {
- shell.status("Running", to_display.display().to_string())
- })?;
- config.shell().verbose(|shell| {
- shell.status("Running", cmd.to_string())
- })?;
+ config
+ .shell()
+ .concise(|shell| shell.status("Running", to_display.display().to_string()))?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Running", cmd.to_string()))?;
let result = cmd.exec();
if errors.len() == 1 {
let (kind, name, pkg_name, e) = errors.pop().unwrap();
- Ok((Test::UnitTest{kind, name, pkg_name}, vec![e]))
+ Ok((
+ Test::UnitTest {
+ kind,
+ name,
+ pkg_name,
+ },
+ vec![e],
+ ))
} else {
- Ok((Test::Multiple, errors.into_iter().map(|(_, _, _, e)| e).collect()))
+ Ok((
+ Test::Multiple,
+ errors.into_iter().map(|(_, _, _, e)| e).collect(),
+ ))
}
}
-fn run_doc_tests(options: &TestOptions,
- test_args: &[String],
- compilation: &Compilation)
- -> CargoResult<(Test, Vec<ProcessError>)> {
+fn run_doc_tests(
+ options: &TestOptions,
+ test_args: &[String],
+ compilation: &Compilation,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
let mut errors = Vec::new();
let config = options.compile_opts.config;
}
let libs = compilation.to_doc_test.iter().map(|package| {
- (package, package.targets().iter().filter(|t| t.doctested())
- .map(|t| (t.src_path(), t.name(), t.crate_name())))
+ (
+ package,
+ package
+ .targets()
+ .iter()
+ .filter(|t| t.doctested())
+ .map(|t| (t.src_path(), t.name(), t.crate_name())),
+ )
});
for (package, tests) in libs {
for (lib, name, crate_name) in tests {
config.shell().status("Doc-tests", name)?;
let mut p = compilation.rustdoc_process(package)?;
- p.arg("--test").arg(lib)
- .arg("--crate-name").arg(&crate_name);
+ p.arg("--test")
+ .arg(lib)
+ .arg("--crate-name")
+ .arg(&crate_name);
for &rust_dep in &[&compilation.deps_output] {
let mut arg = OsString::from("dependency=");
// dynamically as well, causing problems. As a result we only
// pass `--extern` for rlib deps and skip out on all other
// artifacts.
- if lib.extension() != Some(OsStr::new("rlib")) &&
- !target.for_host() {
- continue
+ if lib.extension() != Some(OsStr::new("rlib")) && !target.for_host() {
+ continue;
}
let mut arg = OsString::from(target.crate_name());
arg.push("=");
p.args(flags);
}
- config.shell().verbose(|shell| {
- shell.status("Running", p.to_string())
- })?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Running", p.to_string()))?;
if let Err(e) = p.exec() {
let e = e.downcast::<ProcessError>()?;
errors.push(e);
use toml;
-use core::{Resolve, resolver, Workspace};
+use core::{resolver, Resolve, Workspace};
use core::resolver::WorkspaceResolve;
use util::Filesystem;
use util::errors::{CargoResult, CargoResultExt};
pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
if !ws.root().join("Cargo.lock").exists() {
- return Ok(None)
+ return Ok(None);
}
let root = Filesystem::new(ws.root().to_path_buf());
let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?;
let mut s = String::new();
- f.read_to_string(&mut s).chain_err(|| {
- format!("failed to read file: {}", f.path().display())
- })?;
-
- let resolve = (|| -> CargoResult<Option<Resolve>> {
- let resolve : toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
- let v: resolver::EncodableResolve = resolve.try_into()?;
- Ok(Some(v.into_resolve(ws)?))
- })().chain_err(|| {
- format!("failed to parse lock file at: {}", f.path().display())
- })?;
+ f.read_to_string(&mut s)
+ .chain_err(|| format!("failed to read file: {}", f.path().display()))?;
+
+ let resolve =
+ (|| -> CargoResult<Option<Resolve>> {
+ let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
+ let v: resolver::EncodableResolve = resolve.try_into()?;
+ Ok(Some(v.into_resolve(ws)?))
+ })()
+ .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
Ok(resolve)
}
// helpful on read-only filesystems.
if let Ok(orig) = orig {
if are_equal_lockfiles(orig, &out, ws) {
- return Ok(())
+ return Ok(());
}
}
bail!("can't update in the offline mode");
}
- let flag = if ws.config().network_allowed() {"--locked"} else {"--frozen"};
- bail!("the lock file needs to be updated but {} was passed to \
- prevent this", flag);
+ let flag = if ws.config().network_allowed() {
+ "--locked"
+ } else {
+ "--frozen"
+ };
+ bail!(
+ "the lock file needs to be updated but {} was passed to \
+ prevent this",
+ flag
+ );
}
// Ok, if that didn't work just write it out
- ws_root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| {
- f.file().set_len(0)?;
- f.write_all(out.as_bytes())?;
- Ok(())
- }).chain_err(|| {
- format!("failed to write {}",
- ws.root().join("Cargo.lock").display())
- })?;
+ ws_root
+ .open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
+ .and_then(|mut f| {
+ f.file().set_len(0)?;
+ f.write_all(out.as_bytes())?;
+ Ok(())
+ })
+ .chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;
Ok(())
}
pub use self::cargo_compile::{CompileFilter, CompileMode, FilterRule, MessageFormat, Packages};
pub use self::cargo_read_manifest::{read_package, read_packages};
pub use self::cargo_rustc::{compile_targets, Compilation, Kind, Unit};
-pub use self::cargo_rustc::{Context, is_bad_artifact_name};
-pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig};
-pub use self::cargo_rustc::{Executor, DefaultExecutor};
+pub use self::cargo_rustc::{is_bad_artifact_name, Context};
+pub use self::cargo_rustc::{BuildConfig, BuildOutput, TargetConfig};
+pub use self::cargo_rustc::{DefaultExecutor, Executor};
pub use self::cargo_run::run;
pub use self::cargo_install::{install, install_list, uninstall};
-pub use self::cargo_new::{new, init, NewOptions, VersionControl};
+pub use self::cargo_new::{init, new, NewOptions, VersionControl};
pub use self::cargo_doc::{doc, DocOptions};
-pub use self::cargo_generate_lockfile::{generate_lockfile};
-pub use self::cargo_generate_lockfile::{update_lockfile};
+pub use self::cargo_generate_lockfile::generate_lockfile;
+pub use self::cargo_generate_lockfile::update_lockfile;
pub use self::cargo_generate_lockfile::UpdateOptions;
pub use self::lockfile::{load_pkg_lockfile, write_pkg_lockfile};
-pub use self::cargo_test::{run_tests, run_benches, TestOptions};
+pub use self::cargo_test::{run_benches, run_tests, TestOptions};
pub use self::cargo_package::{package, PackageOpts};
pub use self::registry::{publish, registry_configuration, RegistryConfig};
-pub use self::registry::{registry_login, search, needs_custom_http_transport, http_handle};
+pub use self::registry::{http_handle, needs_custom_http_transport, registry_login, search};
pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
pub use self::registry::configure_http_handle;
pub use self::cargo_fetch::fetch;
pub use self::cargo_pkgid::pkgid;
-pub use self::resolve::{resolve_ws, resolve_ws_precisely, resolve_ws_with_method, resolve_with_previous};
-pub use self::cargo_output_metadata::{output_metadata, OutputMetadataOptions, ExportInfo};
+pub use self::resolve::{resolve_with_previous, resolve_ws, resolve_ws_precisely,
+ resolve_ws_with_method};
+pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};
mod cargo_clean;
mod cargo_compile;
use curl::easy::{Easy, SslOpt};
use git2;
-use registry::{Registry, NewCrate, NewCrateDependency};
+use registry::{NewCrate, NewCrateDependency, Registry};
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use core::dependency::Kind;
use core::manifest::ManifestMetadata;
use ops;
-use sources::{RegistrySource};
+use sources::RegistrySource;
use util::config::{self, Config};
use util::paths;
use util::ToUrl;
Some(ref registry) => allowed_registries.contains(registry),
None => false,
} {
- bail!("some crates cannot be published.\n\
- `{}` is marked as unpublishable", pkg.name());
+ bail!(
+ "some crates cannot be published.\n\
+ `{}` is marked as unpublishable",
+ pkg.name()
+ );
}
}
bail!("published crates cannot contain [patch] sections");
}
- let (mut registry, reg_id) = registry(opts.config,
- opts.token.clone(),
- opts.index.clone(),
- opts.registry.clone())?;
+ let (mut registry, reg_id) = registry(
+ opts.config,
+ opts.token.clone(),
+ opts.index.clone(),
+ opts.registry.clone(),
+ )?;
verify_dependencies(pkg, &reg_id)?;
// Prepare a tarball, with a non-surpressable warning if metadata
// is missing since this is being put online.
- let tarball = ops::package(ws, &ops::PackageOpts {
- config: opts.config,
- verify: opts.verify,
- list: false,
- check_metadata: true,
- allow_dirty: opts.allow_dirty,
- target: opts.target.clone(),
- jobs: opts.jobs,
- registry: opts.registry.clone(),
- })?.unwrap();
+ let tarball = ops::package(
+ ws,
+ &ops::PackageOpts {
+ config: opts.config,
+ verify: opts.verify,
+ list: false,
+ check_metadata: true,
+ allow_dirty: opts.allow_dirty,
+ target: opts.target.clone(),
+ jobs: opts.jobs,
+ registry: opts.registry.clone(),
+ },
+ )?.unwrap();
// Upload said tarball to the specified destination
- opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
-    transmit(opts.config, pkg, tarball.file(), &mut registry, &reg_id, opts.dry_run)?;
+ opts.config
+ .shell()
+ .status("Uploading", pkg.package_id().to_string())?;
+ transmit(
+ opts.config,
+ pkg,
+ tarball.file(),
+ &mut registry,
+        &reg_id,
+ opts.dry_run,
+ )?;
Ok(())
}
-fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
- -> CargoResult<()> {
+fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> {
for dep in pkg.dependencies().iter() {
if dep.source_id().is_path() {
if !dep.specified_req() {
- bail!("all path dependencies must have a version specified \
- when publishing.\ndependency `{}` does not specify \
- a version", dep.name())
+ bail!(
+ "all path dependencies must have a version specified \
+ when publishing.\ndependency `{}` does not specify \
+ a version",
+ dep.name()
+ )
}
} else if dep.source_id() != registry_src {
if dep.source_id().is_registry() {
(crate `{}` is pulled from {})", dep.name(), dep.name(), dep.source_id());
}
} else {
- bail!("crates cannot be published to crates.io with dependencies sourced from \
- a repository\neither publish `{}` as its own crate on crates.io and \
- specify a crates.io version as a dependency or pull it into this \
- repository and specify it with a path and version\n(crate `{}` has \
- repository path `{}`)", dep.name(), dep.name(), dep.source_id());
+ bail!(
+ "crates cannot be published to crates.io with dependencies sourced from \
+ a repository\neither publish `{}` as its own crate on crates.io and \
+ specify a crates.io version as a dependency or pull it into this \
+ repository and specify it with a path and version\n(crate `{}` has \
+ repository path `{}`)",
+ dep.name(),
+ dep.name(),
+ dep.source_id()
+ );
}
}
}
Ok(())
}
-fn transmit(config: &Config,
- pkg: &Package,
- tarball: &File,
- registry: &mut Registry,
- registry_id: &SourceId,
- dry_run: bool) -> CargoResult<()> {
-
- let deps = pkg.dependencies().iter().map(|dep| {
-
- // If the dependency is from a different registry, then include the
- // registry in the dependency.
- let dep_registry_id = match dep.registry_id() {
- Some(id) => id,
- None => bail!("dependency missing registry ID"),
- };
- let dep_registry = if dep_registry_id != registry_id {
- Some(dep_registry_id.url().to_string())
- } else {
- None
- };
-
- Ok(NewCrateDependency {
- optional: dep.is_optional(),
- default_features: dep.uses_default_features(),
- name: dep.name().to_string(),
- features: dep.features().to_vec(),
- version_req: dep.version_req().to_string(),
- target: dep.platform().map(|s| s.to_string()),
- kind: match dep.kind() {
- Kind::Normal => "normal",
- Kind::Build => "build",
- Kind::Development => "dev",
- }.to_string(),
- registry: dep_registry,
+fn transmit(
+ config: &Config,
+ pkg: &Package,
+ tarball: &File,
+ registry: &mut Registry,
+ registry_id: &SourceId,
+ dry_run: bool,
+) -> CargoResult<()> {
+ let deps = pkg.dependencies()
+ .iter()
+ .map(|dep| {
+ // If the dependency is from a different registry, then include the
+ // registry in the dependency.
+ let dep_registry_id = match dep.registry_id() {
+ Some(id) => id,
+ None => bail!("dependency missing registry ID"),
+ };
+ let dep_registry = if dep_registry_id != registry_id {
+ Some(dep_registry_id.url().to_string())
+ } else {
+ None
+ };
+
+ Ok(NewCrateDependency {
+ optional: dep.is_optional(),
+ default_features: dep.uses_default_features(),
+ name: dep.name().to_string(),
+ features: dep.features().to_vec(),
+ version_req: dep.version_req().to_string(),
+ target: dep.platform().map(|s| s.to_string()),
+ kind: match dep.kind() {
+ Kind::Normal => "normal",
+ Kind::Build => "build",
+ Kind::Development => "dev",
+ }.to_string(),
+ registry: dep_registry,
+ })
})
- }).collect::<CargoResult<Vec<NewCrateDependency>>>()?;
+ .collect::<CargoResult<Vec<NewCrateDependency>>>()?;
let manifest = pkg.manifest();
let ManifestMetadata {
- ref authors, ref description, ref homepage, ref documentation,
- ref keywords, ref readme, ref repository, ref license, ref license_file,
- ref categories, ref badges, ref links,
+ ref authors,
+ ref description,
+ ref homepage,
+ ref documentation,
+ ref keywords,
+ ref readme,
+ ref repository,
+ ref license,
+ ref license_file,
+ ref categories,
+ ref badges,
+ ref links,
} = *manifest.metadata();
let readme_content = match *readme {
Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
return Ok(());
}
- let publish = registry.publish(&NewCrate {
- name: pkg.name().to_string(),
- vers: pkg.version().to_string(),
- deps,
- features: pkg.summary().features().clone(),
- authors: authors.clone(),
- description: description.clone(),
- homepage: homepage.clone(),
- documentation: documentation.clone(),
- keywords: keywords.clone(),
- categories: categories.clone(),
- readme: readme_content,
- readme_file: readme.clone(),
- repository: repository.clone(),
- license: license.clone(),
- license_file: license_file.clone(),
- badges: badges.clone(),
- links: links.clone(),
- }, tarball);
+ let publish = registry.publish(
+ &NewCrate {
+ name: pkg.name().to_string(),
+ vers: pkg.version().to_string(),
+ deps,
+ features: pkg.summary().features().clone(),
+ authors: authors.clone(),
+ description: description.clone(),
+ homepage: homepage.clone(),
+ documentation: documentation.clone(),
+ keywords: keywords.clone(),
+ categories: categories.clone(),
+ readme: readme_content,
+ readme_file: readme.clone(),
+ repository: repository.clone(),
+ license: license.clone(),
+ license_file: license_file.clone(),
+ badges: badges.clone(),
+ links: links.clone(),
+ },
+ tarball,
+ );
match publish {
Ok(warnings) => {
if !warnings.invalid_categories.is_empty() {
- let msg = format!("\
- the following are not valid category slugs and were \
- ignored: {}. Please see https://crates.io/category_slugs \
- for the list of all category slugs. \
- ", warnings.invalid_categories.join(", "));
+ let msg = format!(
+ "\
+ the following are not valid category slugs and were \
+ ignored: {}. Please see https://crates.io/category_slugs \
+ for the list of all category slugs. \
+ ",
+ warnings.invalid_categories.join(", ")
+ );
config.shell().warn(&msg)?;
}
if !warnings.invalid_badges.is_empty() {
- let msg = format!("\
- the following are not valid badges and were ignored: {}. \
- Either the badge type specified is unknown or a required \
- attribute is missing. Please see \
- http://doc.crates.io/manifest.html#package-metadata \
- for valid badge types and their required attributes.",
- warnings.invalid_badges.join(", "));
+ let msg = format!(
+ "\
+ the following are not valid badges and were ignored: {}. \
+ Either the badge type specified is unknown or a required \
+ attribute is missing. Please see \
+ http://doc.crates.io/manifest.html#package-metadata \
+ for valid badge types and their required attributes.",
+ warnings.invalid_badges.join(", ")
+ );
config.shell().warn(&msg)?;
}
Ok(())
- },
+ }
Err(e) => Err(e),
}
}
-pub fn registry_configuration(config: &Config,
- registry: Option<String>) -> CargoResult<RegistryConfig> {
-
+pub fn registry_configuration(
+ config: &Config,
+ registry: Option<String>,
+) -> CargoResult<RegistryConfig> {
let (index, token) = match registry {
- Some(registry) => {
-        (Some(config.get_registry_index(&registry)?.to_string()),
- config.get_string(&format!("registries.{}.token", registry))?.map(|p| p.val))
- }
+ Some(registry) => (
+            Some(config.get_registry_index(&registry)?.to_string()),
+ config
+ .get_string(&format!("registries.{}.token", registry))?
+ .map(|p| p.val),
+ ),
None => {
// Checking out for default index and token
- (config.get_string("registry.index")?.map(|p| p.val),
- config.get_string("registry.token")?.map(|p| p.val))
+ (
+ config.get_string("registry.index")?.map(|p| p.val),
+ config.get_string("registry.token")?.map(|p| p.val),
+ )
}
};
- Ok(RegistryConfig {
- index,
- token
- })
+ Ok(RegistryConfig { index, token })
}
-pub fn registry(config: &Config,
- token: Option<String>,
- index: Option<String>,
- registry: Option<String>) -> CargoResult<(Registry, SourceId)> {
+pub fn registry(
+ config: &Config,
+ token: Option<String>,
+ index: Option<String>,
+ registry: Option<String>,
+) -> CargoResult<(Registry, SourceId)> {
// Parse all configuration options
let RegistryConfig {
token: token_config,
};
let api_host = {
let mut src = RegistrySource::remote(&sid, config);
- src.update().chain_err(|| {
- format!("failed to update {}", sid)
- })?;
+ src.update()
+ .chain_err(|| format!("failed to update {}", sid))?;
(src.config()?).unwrap().api.unwrap()
};
let handle = http_handle(config)?;
/// Create a new HTTP handle with appropriate global configuration for cargo.
pub fn http_handle(config: &Config) -> CargoResult<Easy> {
if config.frozen() {
- bail!("attempting to make an HTTP request, but --frozen was \
- specified")
+ bail!(
+ "attempting to make an HTTP request, but --frozen was \
+ specified"
+ )
}
if !config.network_allowed() {
bail!("can't make HTTP request in the offline mode")
/// via environment variables are picked up by libcurl.
fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
if let Some(s) = config.get_string("http.proxy")? {
- return Ok(Some(s.val))
+ return Ok(Some(s.val));
}
if let Ok(cfg) = git2::Config::open_default() {
if let Ok(s) = cfg.get_str("http.proxy") {
- return Ok(Some(s.to_string()))
+ return Ok(Some(s.to_string()));
}
}
Ok(None)
if http_proxy(config)?.is_some() {
Ok(true)
} else {
- Ok(["http_proxy", "HTTP_PROXY",
- "https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok()))
+ Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"]
+ .iter()
+ .any(|v| env::var(v).is_ok()))
}
}
fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
if let Some(s) = config.get_i64("http.timeout")? {
- return Ok(Some(s.val))
+ return Ok(Some(s.val));
}
Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
}
-pub fn registry_login(config: &Config,
- token: String,
- registry: Option<String>) -> CargoResult<()> {
+pub fn registry_login(config: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
let RegistryConfig {
- token: old_token,
- ..
+ token: old_token, ..
} = registry_configuration(config, registry.clone())?;
if let Some(old_token) = old_token {
}
};
- let (mut registry, _) = registry(config,
- opts.token.clone(),
- opts.index.clone(),
- opts.registry.clone())?;
+ let (mut registry, _) = registry(
+ config,
+ opts.token.clone(),
+ opts.index.clone(),
+ opts.registry.clone(),
+ )?;
if let Some(ref v) = opts.to_add {
let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
- let msg = registry.add_owners(&name, &v).map_err(|e| {
- format_err!("failed to invite owners to crate {}: {}", name, e)
- })?;
+ let msg = registry
+ .add_owners(&name, &v)
+ .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?;
config.shell().status("Owner", msg)?;
}
if let Some(ref v) = opts.to_remove {
let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
- config.shell().status("Owner", format!("removing {:?} from crate {}",
- v, name))?;
- registry.remove_owners(&name, &v).chain_err(|| {
- format!("failed to remove owners from crate {}", name)
- })?;
+ config
+ .shell()
+ .status("Owner", format!("removing {:?} from crate {}", v, name))?;
+ registry
+ .remove_owners(&name, &v)
+ .chain_err(|| format!("failed to remove owners from crate {}", name))?;
}
if opts.list {
- let owners = registry.list_owners(&name).chain_err(|| {
- format!("failed to list owners of crate {}", name)
- })?;
+ let owners = registry
+ .list_owners(&name)
+ .chain_err(|| format!("failed to list owners of crate {}", name))?;
for owner in owners.iter() {
print!("{}", owner.login);
match (owner.name.as_ref(), owner.email.as_ref()) {
(Some(name), Some(email)) => println!(" ({} <{}>)", name, email),
- (Some(s), None) |
- (None, Some(s)) => println!(" ({})", s),
+ (Some(s), None) | (None, Some(s)) => println!(" ({})", s),
(None, None) => println!(),
}
}
Ok(())
}
-pub fn yank(config: &Config,
- krate: Option<String>,
- version: Option<String>,
- token: Option<String>,
- index: Option<String>,
- undo: bool,
- reg: Option<String>) -> CargoResult<()> {
+pub fn yank(
+ config: &Config,
+ krate: Option<String>,
+ version: Option<String>,
+ token: Option<String>,
+ index: Option<String>,
+ undo: bool,
+ reg: Option<String>,
+) -> CargoResult<()> {
let name = match krate {
Some(name) => name,
None => {
};
let version = match version {
Some(v) => v,
- None => bail!("a version must be specified to yank")
+ None => bail!("a version must be specified to yank"),
};
let (mut registry, _) = registry(config, token, index, reg)?;
if undo {
- config.shell().status("Unyank", format!("{}:{}", name, version))?;
- registry.unyank(&name, &version).chain_err(|| {
- "failed to undo a yank"
- })?;
+ config
+ .shell()
+ .status("Unyank", format!("{}:{}", name, version))?;
+ registry
+ .unyank(&name, &version)
+ .chain_err(|| "failed to undo a yank")?;
} else {
- config.shell().status("Yank", format!("{}:{}", name, version))?;
- registry.yank(&name, &version).chain_err(|| {
- "failed to yank"
- })?;
+ config
+ .shell()
+ .status("Yank", format!("{}:{}", name, version))?;
+ registry
+ .yank(&name, &version)
+ .chain_err(|| "failed to yank")?;
}
Ok(())
}
-pub fn search(query: &str,
- config: &Config,
- index: Option<String>,
- limit: u32,
- reg: Option<String>) -> CargoResult<()> {
+pub fn search(
+ query: &str,
+ config: &Config,
+ index: Option<String>,
+ limit: u32,
+ reg: Option<String>,
+) -> CargoResult<()> {
fn truncate_with_ellipsis(s: &str, max_width: usize) -> String {
// We should truncate at grapheme-boundary and compute character-widths,
// yet the dependencies on unicode-segmentation and unicode-width are
}
let (mut registry, _) = registry(config, None, index, reg)?;
- let (crates, total_crates) = registry.search(query, limit).chain_err(|| {
- "failed to retrieve search results from the registry"
- })?;
+ let (crates, total_crates) = registry
+ .search(query, limit)
+ .chain_err(|| "failed to retrieve search results from the registry")?;
- let names = crates.iter()
+ let names = crates
+ .iter()
.map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version))
.collect::<Vec<String>>();
- let description_margin = names.iter()
- .map(|s| s.len() + 4)
- .max()
- .unwrap_or_default();
+ let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default();
let description_length = cmp::max(80, 128 - description_margin);
- let descriptions = crates.iter()
- .map(|krate|
- krate.description.as_ref().map(|desc|
- truncate_with_ellipsis(&desc.replace("\n", " "), description_length)));
+ let descriptions = crates.iter().map(|krate| {
+ krate
+ .description
+ .as_ref()
+ .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length))
+ });
for (name, description) in names.into_iter().zip(descriptions) {
let line = match description {
Some(desc) => {
- let space = repeat(' ').take(description_margin - name.len())
- .collect::<String>();
+ let space = repeat(' ')
+ .take(description_margin - name.len())
+ .collect::<String>();
name + &space + "# " + &desc
}
- None => name
+ None => name,
};
println!("{}", line);
}
let search_max_limit = 100;
if total_crates > u32::from(limit) && limit < search_max_limit {
- println!("... and {} crates more (use --limit N to see more)",
- total_crates - u32::from(limit));
+ println!(
+ "... and {} crates more (use --limit N to see more)",
+ total_crates - u32::from(limit)
+ );
} else if total_crates > u32::from(limit) && limit >= search_max_limit {
- println!("... and {} crates more (go to http://crates.io/search?q={} to see more)",
- total_crates - u32::from(limit),
- percent_encode(query.as_bytes(), QUERY_ENCODE_SET));
+ println!(
+ "... and {} crates more (go to http://crates.io/search?q={} to see more)",
+ total_crates - u32::from(limit),
+ percent_encode(query.as_bytes(), QUERY_ENCODE_SET)
+ );
}
Ok(())
use core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
use core::registry::PackageRegistry;
-use core::resolver::{self, Resolve, Method};
+use core::resolver::{self, Method, Resolve};
use sources::PathSource;
use ops;
use util::profile;
/// Resolves dependencies for some packages of the workspace,
/// taking into account `paths` overrides and activated features.
-pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>,
- source: Option<Box<Source + 'a>>,
- features: &[String],
- all_features: bool,
- no_default_features: bool,
- specs: &[PackageIdSpec])
- -> CargoResult<(PackageSet<'a>, Resolve)> {
+pub fn resolve_ws_precisely<'a>(
+ ws: &Workspace<'a>,
+ source: Option<Box<Source + 'a>>,
+ features: &[String],
+ all_features: bool,
+ no_default_features: bool,
+ specs: &[PackageIdSpec],
+) -> CargoResult<(PackageSet<'a>, Resolve)> {
let features = Method::split_features(features);
let method = if all_features {
Method::Everything
resolve_ws_with_method(ws, source, method, specs)
}
-pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
- source: Option<Box<Source + 'a>>,
- method: Method,
- specs: &[PackageIdSpec])
- -> CargoResult<(PackageSet<'a>, Resolve)> {
+pub fn resolve_ws_with_method<'a>(
+ ws: &Workspace<'a>,
+ source: Option<Box<Source + 'a>>,
+ method: Method,
+ specs: &[PackageIdSpec],
+) -> CargoResult<(PackageSet<'a>, Resolve)> {
let mut registry = PackageRegistry::new(ws.config())?;
if let Some(source) = source {
registry.add_preloaded(source);
add_overrides(&mut registry, ws)?;
for &(ref replace_spec, ref dep) in ws.root_replace() {
- if !resolve.iter().any(|r| replace_spec.matches(r) && !dep.matches_id(r)) {
- ws.config().shell().warn(
- format!("package replacement is not used: {}", replace_spec)
- )?
+ if !resolve
+ .iter()
+ .any(|r| replace_spec.matches(r) && !dep.matches_id(r))
+ {
+ ws.config()
+ .shell()
+ .warn(format!("package replacement is not used: {}", replace_spec))?
}
}
ops::load_pkg_lockfile(ws)?
};
- let resolved_with_overrides =
- ops::resolve_with_previous(&mut registry,
- ws,
- method,
- resolve.as_ref(),
- None,
- specs,
- add_patches,
- true)?;
+ let resolved_with_overrides = ops::resolve_with_previous(
+ &mut registry,
+ ws,
+ method,
+ resolve.as_ref(),
+ None,
+ specs,
+ add_patches,
+ true,
+ )?;
let packages = get_resolved_packages(&resolved_with_overrides, registry);
Ok((packages, resolved_with_overrides))
}
-fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: bool)
- -> CargoResult<Resolve> {
+fn resolve_with_registry(
+ ws: &Workspace,
+ registry: &mut PackageRegistry,
+ warn: bool,
+) -> CargoResult<Resolve> {
let prev = ops::load_pkg_lockfile(ws)?;
- let resolve = resolve_with_previous(registry,
- ws,
- Method::Everything,
- prev.as_ref(),
- None,
- &[],
- true,
- warn)?;
+ let resolve = resolve_with_previous(
+ registry,
+ ws,
+ Method::Everything,
+ prev.as_ref(),
+ None,
+ &[],
+ true,
+ warn,
+ )?;
if !ws.is_ephemeral() {
ops::write_pkg_lockfile(ws, &resolve)?;
Ok(resolve)
}
-
/// Resolve all dependencies for a package using an optional previous instance
/// of resolve to guide the resolution process.
///
///
/// The previous resolve normally comes from a lockfile. This function does not
/// read or write lockfiles from the filesystem.
-pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
- ws: &Workspace,
- method: Method,
- previous: Option<&'a Resolve>,
- to_avoid: Option<&HashSet<&'a PackageId>>,
- specs: &[PackageIdSpec],
- register_patches: bool,
- warn: bool)
- -> CargoResult<Resolve> {
+pub fn resolve_with_previous<'a>(
+ registry: &mut PackageRegistry,
+ ws: &Workspace,
+ method: Method,
+ previous: Option<&'a Resolve>,
+ to_avoid: Option<&HashSet<&'a PackageId>>,
+ specs: &[PackageIdSpec],
+ register_patches: bool,
+ warn: bool,
+) -> CargoResult<Resolve> {
// Here we place an artificial limitation that all non-registry sources
// cannot be locked at more than one revision. This means that if a git
// repository provides more than one package, they must all be updated in
// different
let mut to_avoid_sources = HashSet::new();
if let Some(to_avoid) = to_avoid {
- to_avoid_sources.extend(to_avoid.iter()
- .map(|p| p.source_id())
- .filter(|s| !s.is_registry()));
+ to_avoid_sources.extend(
+ to_avoid
+ .iter()
+ .map(|p| p.source_id())
+ .filter(|s| !s.is_registry()),
+ );
}
let ref keep = |p: &&'a PackageId| {
if let Some(r) = previous {
trace!("previous: {:?}", r);
for node in r.iter().filter(keep) {
- let deps = r.deps_not_replaced(node)
- .filter(keep)
- .cloned().collect();
+ let deps = r.deps_not_replaced(node).filter(keep).cloned().collect();
registry.register_lock(node.clone(), deps);
}
}
Some(r) => r,
None => {
registry.patch(url, patches)?;
- continue
+ continue;
}
};
- let patches = patches.iter().map(|dep| {
- let unused = previous.unused_patches();
- let candidates = previous.iter().chain(unused);
- match candidates.filter(keep).find(|id| dep.matches_id(id)) {
- Some(id) => {
- let mut dep = dep.clone();
- dep.lock_to(id);
- dep
+ let patches = patches
+ .iter()
+ .map(|dep| {
+ let unused = previous.unused_patches();
+ let candidates = previous.iter().chain(unused);
+ match candidates.filter(keep).find(|id| dep.matches_id(id)) {
+ Some(id) => {
+ let mut dep = dep.clone();
+ dep.lock_to(id);
+ dep
+ }
+ None => dep.clone(),
}
- None => dep.clone(),
- }
- }).collect::<Vec<_>>();
+ })
+ .collect::<Vec<_>>();
registry.patch(url, &patches)?;
}
if specs.iter().any(|spec| spec.matches(member_id)) {
base
} else {
- continue
+ continue;
}
}
}
let root_replace = ws.root_replace();
let replace = match previous {
- Some(r) => {
- root_replace.iter().map(|&(ref spec, ref dep)| {
+ Some(r) => root_replace
+ .iter()
+ .map(|&(ref spec, ref dep)| {
for (key, val) in r.replacements().iter() {
if spec.matches(key) && dep.matches_id(val) && keep(&val) {
let mut dep = dep.clone();
dep.lock_to(val);
- return (spec.clone(), dep)
+ return (spec.clone(), dep);
}
}
(spec.clone(), dep.clone())
- }).collect::<Vec<_>>()
- }
+ })
+ .collect::<Vec<_>>(),
None => root_replace.to_vec(),
};
- let mut resolved = resolver::resolve(&summaries,
- &replace,
- registry,
- Some(ws.config()),
- warn)?;
+ let mut resolved = resolver::resolve(&summaries, &replace, registry, Some(ws.config()), warn)?;
resolved.register_used_patches(registry.patches());
if let Some(previous) = previous {
resolved.merge_from(previous)?;
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
-fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
- ws: &Workspace<'a>) -> CargoResult<()> {
+fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, ws: &Workspace<'a>) -> CargoResult<()> {
let paths = match ws.config().get_list("paths")? {
Some(list) => list,
- None => return Ok(())
+ None => return Ok(()),
};
let paths = paths.val.iter().map(|&(ref s, ref p)| {
let id = SourceId::for_path(&path)?;
let mut source = PathSource::new_recursive(&path, &id, ws.config());
source.update().chain_err(|| {
- format!("failed to update path override `{}` \
- (defined in `{}`)", path.display(),
- definition.display())
+ format!(
+ "failed to update path override `{}` \
+ (defined in `{}`)",
+ path.display(),
+ definition.display()
+ )
})?;
registry.add_override(Box::new(source));
}
Ok(())
}
-fn get_resolved_packages<'a>(resolve: &Resolve,
- registry: PackageRegistry<'a>)
- -> PackageSet<'a> {
+fn get_resolved_packages<'a>(resolve: &Resolve, registry: PackageRegistry<'a>) -> PackageSet<'a> {
let ids: Vec<PackageId> = resolve.iter().cloned().collect();
registry.get(&ids)
}
-
use url::Url;
-use core::{Source, SourceId, GitReference};
+use core::{GitReference, Source, SourceId};
use sources::ReplacedSource;
use util::{Config, ToUrl};
use util::config::ConfigValue;
id2name: HashMap::new(),
config,
};
- base.add("crates-io", SourceConfig {
- id: SourceId::crates_io(config)?,
- replace_with: None,
- });
+ base.add(
+ "crates-io",
+ SourceConfig {
+ id: SourceId::crates_io(config)?,
+ replace_with: None,
+ },
+ );
Ok(base)
}
loop {
let cfg = match self.cfgs.get(name) {
Some(cfg) => cfg,
- None => bail!("could not find a configured source with the \
- name `{}` when attempting to lookup `{}` \
- (configuration in `{}`)",
- name, orig_name, path.display()),
+ None => bail!(
+ "could not find a configured source with the \
+ name `{}` when attempting to lookup `{}` \
+ (configuration in `{}`)",
+ name,
+ orig_name,
+ path.display()
+ ),
};
match cfg.replace_with {
Some((ref s, ref p)) => {
}
None if *id == cfg.id => return Ok(id.load(self.config)?),
None => {
- new_id = cfg.id.with_precise(id.precise()
- .map(|s| s.to_string()));
- break
+ new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string()));
+ break;
}
}
debug!("following pointer to {}", name);
if name == orig_name {
- bail!("detected a cycle of `replace-with` sources, the source \
- `{}` is eventually replaced with itself \
- (configuration in `{}`)", name, path.display())
+ bail!(
+ "detected a cycle of `replace-with` sources, the source \
+ `{}` is eventually replaced with itself \
+ (configuration in `{}`)",
+ name,
+ path.display()
+ )
}
}
let new_src = new_id.load(self.config)?;
let old_src = id.load(self.config)?;
if !new_src.supports_checksums() && old_src.supports_checksums() {
- bail!("\
+ bail!(
+ "\
cannot replace `{orig}` with `{name}`, the source `{orig}` supports \
checksums, but `{name}` does not
a lock file compatible with `{orig}` cannot be generated in this situation
-", orig = orig_name, name = name);
+",
+ orig = orig_name,
+ name = name
+ );
}
if old_src.requires_precise() && id.precise().is_none() {
- bail!("\
+ bail!(
+ "\
the source {orig} requires a lock file to be present first before it can be
used against vendored source code
remove the source replacement configuration, generate a lock file, and then
restore the source replacement configuration to continue the build
-", orig = orig_name);
+",
+ orig = orig_name
+ );
}
Ok(Box::new(ReplacedSource::new(id, &new_id, new_src)))
srcs.push(SourceId::for_registry(&url)?);
}
if let Some(val) = table.get("local-registry") {
- let (s, path) = val.string(&format!("source.{}.local-registry",
- name))?;
+ let (s, path) = val.string(&format!("source.{}.local-registry", name))?;
let mut path = path.to_path_buf();
path.pop();
path.pop();
srcs.push(SourceId::for_local_registry(&path)?);
}
if let Some(val) = table.get("directory") {
- let (s, path) = val.string(&format!("source.{}.directory",
- name))?;
+ let (s, path) = val.string(&format!("source.{}.directory", name))?;
let mut path = path.to_path_buf();
path.pop();
path.pop();
};
let reference = match try("branch")? {
Some(b) => GitReference::Branch(b.0.to_string()),
- None => {
- match try("tag")? {
- Some(b) => GitReference::Tag(b.0.to_string()),
- None => {
- match try("rev")? {
- Some(b) => GitReference::Rev(b.0.to_string()),
- None => GitReference::Branch("master".to_string()),
- }
- }
- }
- }
+ None => match try("tag")? {
+ Some(b) => GitReference::Tag(b.0.to_string()),
+ None => match try("rev")? {
+ Some(b) => GitReference::Rev(b.0.to_string()),
+ None => GitReference::Branch("master".to_string()),
+ },
+ },
};
srcs.push(SourceId::for_git(&url, reference)?);
}
let mut srcs = srcs.into_iter();
let src = srcs.next().ok_or_else(|| {
- format_err!("no source URL specified for `source.{}`, need \
- either `registry` or `local-registry` defined",
- name)
+ format_err!(
+ "no source URL specified for `source.{}`, need \
+ either `registry` or `local-registry` defined",
+ name
+ )
})?;
if srcs.next().is_some() {
bail!("more than one source URL specified for `source.{}`", name)
let mut replace_with = None;
if let Some(val) = table.get("replace-with") {
- let (s, path) = val.string(&format!("source.{}.replace-with",
- name))?;
+ let (s, path) = val.string(&format!("source.{}.replace-with", name))?;
replace_with = Some((s.to_string(), path.to_path_buf()));
}
- self.add(name, SourceConfig {
- id: src,
- replace_with,
- });
+ self.add(
+ name,
+ SourceConfig {
+ id: src,
+ replace_with,
+ },
+ );
return Ok(());
fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
let (url, path) = cfg.string(key)?;
let url = url.to_url().chain_err(|| {
- format!("configuration key `{}` specified an invalid \
- URL (in {})", key, path.display())
-
+ format!(
+ "configuration key `{}` specified an invalid \
+ URL (in {})",
+ key,
+ path.display()
+ )
})?;
Ok(url)
}
use serde_json;
-use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
+use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
}
impl<'cfg> DirectorySource<'cfg> {
- pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
- -> DirectorySource<'cfg> {
+ pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
}
impl<'cfg> Registry for DirectorySource<'cfg> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
- format!("failed to read root of directory source: {}",
- self.root.display())
+ format!(
+ "failed to read root of directory source: {}",
+ self.root.display()
+ )
})?;
for entry in entries {
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
if s.starts_with('.') {
- continue
+ continue;
}
}
// downside of accidentally misconfigured vendor directories
// silently returning less crates.
if !path.join("Cargo.toml").exists() {
- continue
+ continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
- format!("failed to load checksum `.cargo-checksum.json` \
- of {} v{}",
- pkg.package_id().name(),
- pkg.package_id().version())
-
+ format!(
+ "failed to load checksum `.cargo-checksum.json` \
+ of {} v{}",
+ pkg.package_id().name(),
+ pkg.package_id().version()
+ )
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
- format!("failed to decode `.cargo-checksum.json` of \
- {} v{}",
- pkg.package_id().name(),
- pkg.package_id().version())
+ format!(
+ "failed to decode `.cargo-checksum.json` of \
+ {} v{}",
+ pkg.package_id().name(),
+ pkg.package_id().version()
+ )
})?;
let mut manifest = pkg.manifest().clone();
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
- self.packages.get(id).map(|p| &p.0).cloned().ok_or_else(|| {
- format_err!("failed to find package with id: {}", id)
- })
+ self.packages
+ .get(id)
+ .map(|p| &p.0)
+ .cloned()
+ .ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {
Some(&(ref pkg, ref cksum)) => (pkg, cksum),
- None => bail!("failed to find entry for `{}` in directory source",
- id),
+ None => bail!("failed to find entry for `{}` in directory source", id),
};
let mut buf = [0; 16 * 1024];
n => h.update(&buf[..n]),
}
}
- })().chain_err(|| {
- format!("failed to calculate checksum of: {}",
- file.display())
- })?;
+ })()
+ .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
let actual = hex::encode(h.finish());
if &*actual != cksum {
- bail!("\
- the listed checksum of `{}` has changed:\n\
- expected: {}\n\
- actual: {}\n\
- \n\
- directory sources are not intended to be edited, if \
- modifications are required then it is recommended \
- that [replace] is used with a forked copy of the \
- source\
- ", file.display(), cksum, actual);
+ bail!(
+ "\
+ the listed checksum of `{}` has changed:\n\
+ expected: {}\n\
+ actual: {}\n\
+ \n\
+ directory sources are not intended to be edited, if \
+ modifications are required then it is recommended \
+ that [replace] is used with a forked copy of the \
+ source\
+ ",
+ file.display(),
+ cksum,
+ actual
+ );
}
}
-pub use self::utils::{GitRemote, GitDatabase, GitCheckout, GitRevision, fetch};
-pub use self::source::{GitSource, canonicalize_url};
+pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote, GitRevision};
+pub use self::source::{canonicalize_url, GitSource};
mod utils;
mod source;
use core::source::{Source, SourceId};
use core::GitReference;
-use core::{Package, PackageId, Summary, Registry, Dependency};
+use core::{Dependency, Package, PackageId, Registry, Summary};
use util::Config;
use util::errors::CargoResult;
use util::hex::short_hash;
}
impl<'cfg> GitSource<'cfg> {
- pub fn new(source_id: &SourceId,
- config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
+ pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
assert!(source_id.is_git(), "id is not git, id={}", source_id);
let remote = GitRemote::new(source_id.url());
Ok(source)
}
- pub fn url(&self) -> &Url { self.remote.url() }
+ pub fn url(&self) -> &Url {
+ self.remote.url()
+ }
pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
if self.path_source.is_none() {
fn ident(url: &Url) -> CargoResult<String> {
let url = canonicalize_url(url)?;
- let ident = url.path_segments().and_then(|mut s| s.next_back()).unwrap_or("");
+ let ident = url.path_segments()
+ .and_then(|mut s| s.next_back())
+ .unwrap_or("");
- let ident = if ident == "" {
- "_empty"
- } else {
- ident
- };
+ let ident = if ident == "" { "_empty" } else { ident };
Ok(format!("{}-{}", ident, short_hash(&url)))
}
// cannot-be-a-base-urls are not supported
// eg. github.com:rust-lang-nursery/rustfmt.git
if url.cannot_be_a_base() {
- bail!("invalid url `{}`: cannot-be-a-base-URLs are not supported", url)
+ bail!(
+ "invalid url `{}`: cannot-be-a-base-URLs are not supported",
+ url
+ )
}
// Strip a trailing slash
match self.reference.pretty_ref() {
Some(s) => write!(f, " ({})", s),
- None => Ok(())
+ None => Ok(()),
}
}
}
impl<'cfg> Registry for GitSource<'cfg> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
- let src = self.path_source.as_mut()
- .expect("BUG: update() must be called before query()");
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+ let src = self.path_source
+ .as_mut()
+ .expect("BUG: update() must be called before query()");
src.query(dep, f)
}
}
fn update(&mut self) -> CargoResult<()> {
- let lock = self.config.git_path()
- .open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
+ let lock =
+ self.config
+ .git_path()
+ .open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
let db_path = lock.parent().join("db").join(&self.ident);
if self.config.cli_unstable().offline && !db_path.exists() {
- bail!("can't checkout from '{}': you are in the offline mode (-Z offline)",
- self.remote.url());
+ bail!(
+ "can't checkout from '{}': you are in the offline mode (-Z offline)",
+ self.remote.url()
+ );
}
        // Resolve our reference to an actual revision, and check whether the
        // database is pinned at that revision; if it isn't, we issue an update
        // to try to find the revision.
let actual_rev = self.remote.rev_for(&db_path, &self.reference);
- let should_update = actual_rev.is_err() ||
- self.source_id.precise().is_none();
+ let should_update = actual_rev.is_err() || self.source_id.precise().is_none();
let (db, actual_rev) = if should_update && !self.config.cli_unstable().offline {
- self.config.shell().status("Updating",
- format!("git repository `{}`", self.remote.url()))?;
+ self.config.shell().status(
+ "Updating",
+ format!("git repository `{}`", self.remote.url()),
+ )?;
trace!("updating git source `{:?}`", self.remote);
- self.remote.checkout(&db_path, &self.reference, self.config)?
+ self.remote
+ .checkout(&db_path, &self.reference, self.config)?
} else {
(self.remote.db_at(&db_path)?, actual_rev.unwrap())
};
// https://github.com/servo/servo/pull/14397
let short_id = db.to_short_id(actual_rev.clone()).unwrap();
- let checkout_path = lock.parent().join("checkouts")
- .join(&self.ident).join(short_id.as_str());
+ let checkout_path = lock.parent()
+ .join("checkouts")
+ .join(&self.ident)
+ .join(short_id.as_str());
// Copy the database to the checkout location. After this we could drop
// the lock on the database as we no longer needed it, but we leave it
db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
- let path_source = PathSource::new_recursive(&checkout_path,
- &source_id,
- self.config);
+ let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config);
self.path_source = Some(path_source);
self.rev = Some(actual_rev);
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
- trace!("getting packages for package id `{}` from `{:?}`", id,
- self.remote);
- self.path_source.as_mut()
- .expect("BUG: update() must be called before get()")
- .download(id)
+ trace!(
+ "getting packages for package id `{}` from `{:?}`",
+ id,
+ self.remote
+ );
+ self.path_source
+ .as_mut()
+ .expect("BUG: update() must be called before get()")
+ .download(id)
}
fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
use url::Url;
use core::GitReference;
-use util::{ToUrl, internal, Config, network, Progress};
+use util::{internal, network, Config, Progress, ToUrl};
use util::paths;
-use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
#[derive(PartialEq, Clone, Debug)]
pub struct GitRevision(git2::Oid);
}
fn serialize_str<T, S>(t: &T, s: S) -> Result<S::Ok, S::Error>
- where T: fmt::Display,
- S: ser::Serializer,
+where
+ T: fmt::Display,
+ S: ser::Serializer,
{
t.to_string().serialize(s)
}
/// `GitDatabase`.
#[derive(PartialEq, Clone, Debug, Serialize)]
pub struct GitRemote {
- #[serde(serialize_with = "serialize_str")]
- url: Url,
+ #[serde(serialize_with = "serialize_str")] url: Url,
}
/// `GitDatabase` is a local clone of a remote repository's database. Multiple
pub struct GitDatabase {
remote: GitRemote,
path: PathBuf,
- #[serde(skip_serializing)]
- repo: git2::Repository,
+ #[serde(skip_serializing)] repo: git2::Repository,
}
/// `GitCheckout` is a local checkout of a particular revision. Calling
database: &'a GitDatabase,
location: PathBuf,
revision: GitRevision,
- #[serde(skip_serializing)]
- repo: git2::Repository,
+ #[serde(skip_serializing)] repo: git2::Repository,
}
// Implementations
&self.url
}
- pub fn rev_for(&self, path: &Path, reference: &GitReference)
- -> CargoResult<GitRevision> {
+ pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult<GitRevision> {
reference.resolve(&self.db_at(path)?.repo)
}
- pub fn checkout(&self,
- into: &Path,
- reference: &GitReference,
- cargo_config: &Config)
- -> CargoResult<(GitDatabase, GitRevision)>
- {
+ pub fn checkout(
+ &self,
+ into: &Path,
+ reference: &GitReference,
+ cargo_config: &Config,
+ ) -> CargoResult<(GitDatabase, GitRevision)> {
let mut repo_and_rev = None;
if let Ok(mut repo) = git2::Repository::open(into) {
- self.fetch_into(&mut repo, cargo_config).chain_err(|| {
- format!("failed to fetch into {}", into.display())
- })?;
+ self.fetch_into(&mut repo, cargo_config)
+ .chain_err(|| format!("failed to fetch into {}", into.display()))?;
if let Ok(rev) = reference.resolve(&repo) {
repo_and_rev = Some((repo, rev));
}
let (repo, rev) = match repo_and_rev {
Some(pair) => pair,
None => {
- let repo = self.clone_into(into, cargo_config).chain_err(|| {
- format!("failed to clone into: {}", into.display())
- })?;
+ let repo = self.clone_into(into, cargo_config)
+ .chain_err(|| format!("failed to clone into: {}", into.display()))?;
let rev = reference.resolve(&repo)?;
(repo, rev)
}
};
- Ok((GitDatabase {
- remote: self.clone(),
- path: into.to_path_buf(),
- repo,
- }, rev))
+ Ok((
+ GitDatabase {
+ remote: self.clone(),
+ path: into.to_path_buf(),
+ repo,
+ },
+ rev,
+ ))
}
pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
}
fs::create_dir_all(dst)?;
let mut repo = git2::Repository::init_bare(dst)?;
- fetch(&mut repo, &self.url, "refs/heads/*:refs/heads/*", cargo_config)?;
+ fetch(
+ &mut repo,
+ &self.url,
+ "refs/heads/*:refs/heads/*",
+ cargo_config,
+ )?;
Ok(repo)
}
}
impl GitDatabase {
- pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config)
- -> CargoResult<GitCheckout> {
+ pub fn copy_to(
+ &self,
+ rev: GitRevision,
+ dest: &Path,
+ cargo_config: &Config,
+ ) -> CargoResult<GitCheckout> {
let mut checkout = None;
if let Ok(repo) = git2::Repository::open(dest) {
let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
impl GitReference {
fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
let id = match *self {
- GitReference::Tag(ref s) => {
- (|| -> CargoResult<git2::Oid> {
- let refname = format!("refs/tags/{}", s);
- let id = repo.refname_to_id(&refname)?;
- let obj = repo.find_object(id, None)?;
- let obj = obj.peel(ObjectType::Commit)?;
- Ok(obj.id())
- })().chain_err(|| {
- format!("failed to find tag `{}`", s)
- })?
- }
+ GitReference::Tag(ref s) => (|| -> CargoResult<git2::Oid> {
+ let refname = format!("refs/tags/{}", s);
+ let id = repo.refname_to_id(&refname)?;
+ let obj = repo.find_object(id, None)?;
+ let obj = obj.peel(ObjectType::Commit)?;
+ Ok(obj.id())
+ })()
+ .chain_err(|| format!("failed to find tag `{}`", s))?,
GitReference::Branch(ref s) => {
(|| {
let b = repo.find_branch(s, git2::BranchType::Local)?;
- b.get().target().ok_or_else(|| {
- format_err!("branch `{}` did not have a target", s)
- })
- })().chain_err(|| {
- format!("failed to find branch `{}`", s)
- })?
+ b.get()
+ .target()
+ .ok_or_else(|| format_err!("branch `{}` did not have a target", s))
+ })()
+ .chain_err(|| format!("failed to find branch `{}`", s))?
}
GitReference::Rev(ref s) => {
let obj = repo.revparse_single(s)?;
}
impl<'a> GitCheckout<'a> {
- fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision,
- repo: git2::Repository)
- -> GitCheckout<'a>
- {
+ fn new(
+ path: &Path,
+ database: &'a GitDatabase,
+ revision: GitRevision,
+ repo: git2::Repository,
+ ) -> GitCheckout<'a> {
GitCheckout {
location: path.to_path_buf(),
database,
}
}
- fn clone_into(into: &Path,
- database: &'a GitDatabase,
- revision: GitRevision,
- config: &Config)
- -> CargoResult<GitCheckout<'a>>
- {
+ fn clone_into(
+ into: &Path,
+ database: &'a GitDatabase,
+ revision: GitRevision,
+ config: &Config,
+ ) -> CargoResult<GitCheckout<'a>> {
let dirname = into.parent().unwrap();
- fs::create_dir_all(&dirname).chain_err(|| {
- format!("Couldn't mkdir {}", dirname.display())
- })?;
+ fs::create_dir_all(&dirname).chain_err(|| format!("Couldn't mkdir {}", dirname.display()))?;
if into.exists() {
paths::remove_dir_all(into)?;
}
info!("update submodules for: {:?}", repo.workdir().unwrap());
for mut child in repo.submodules()? {
- update_submodule(repo, &mut child, cargo_config)
- .chain_err(|| {
- format!("failed to update submodule `{}`",
- child.name().unwrap_or(""))
- })?;
+ update_submodule(repo, &mut child, cargo_config).chain_err(|| {
+ format!(
+ "failed to update submodule `{}`",
+ child.name().unwrap_or("")
+ )
+ })?;
}
Ok(())
}
- fn update_submodule(parent: &git2::Repository,
- child: &mut git2::Submodule,
- cargo_config: &Config) -> CargoResult<()> {
+ fn update_submodule(
+ parent: &git2::Repository,
+ child: &mut git2::Submodule,
+ cargo_config: &Config,
+ ) -> CargoResult<()> {
child.init(false)?;
- let url = child.url().ok_or_else(|| {
- internal("non-utf8 url for submodule")
- })?;
+ let url = child
+ .url()
+ .ok_or_else(|| internal("non-utf8 url for submodule"))?;
// A submodule which is listed in .gitmodules but not actually
// checked out will not have a head id, so we should ignore it.
let mut repo = match head_and_repo {
Ok((head, repo)) => {
if child.head_id() == head {
- return update_submodules(&repo, cargo_config)
+ return update_submodules(&repo, cargo_config);
}
repo
}
let refspec = "refs/heads/*:refs/heads/*";
let url = url.to_url()?;
fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| {
- internal(format!("failed to fetch submodule `{}` from {}",
- child.name().unwrap_or(""), url))
+ internal(format!(
+ "failed to fetch submodule `{}` from {}",
+ child.name().unwrap_or(""),
+ url
+ ))
})?;
let obj = repo.find_object(head, None)?;
/// credentials until we give it a reason to not do so. To ensure we don't
/// just sit here looping forever we keep track of authentications we've
/// attempted and we don't try the same ones again.
-fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
- -> CargoResult<T>
- where F: FnMut(&mut git2::Credentials) -> CargoResult<T>
+fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult<T>
+where
+ F: FnMut(&mut git2::Credentials) -> CargoResult<T>,
{
let mut cred_helper = git2::CredentialHelper::new(url);
cred_helper.config(cfg);
if allowed.contains(git2::CredentialType::USERNAME) {
debug_assert!(username.is_none());
ssh_username_requested = true;
- return Err(git2::Error::from_str("gonna try usernames later"))
+ return Err(git2::Error::from_str("gonna try usernames later"));
}
// An "SSH_KEY" authentication indicates that we need some sort of SSH
let username = username.unwrap();
debug_assert!(!ssh_username_requested);
ssh_agent_attempts.push(username.to_string());
- return git2::Cred::ssh_key_from_agent(username)
+ return git2::Cred::ssh_key_from_agent(username);
}
// Sometimes libgit2 will ask for a username/password in plaintext. This
if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) {
let r = git2::Cred::credential_helper(cfg, url, username);
cred_helper_bad = Some(r.is_err());
- return r
+ return r;
}
// I'm... not sure what the DEFAULT kind of authentication is, but seems
// easy to support?
if allowed.contains(git2::CredentialType::DEFAULT) {
- return git2::Cred::default()
+ return git2::Cred::default();
}
// Whelp, we tried our best
attempts += 1;
if attempts == 1 {
ssh_agent_attempts.push(s.to_string());
- return git2::Cred::ssh_key_from_agent(&s)
+ return git2::Cred::ssh_key_from_agent(&s);
}
}
Err(git2::Error::from_str("no authentication available"))
// errors happened). Otherwise something else is funny so we bail
// out.
if attempts != 2 {
- break
+ break;
}
}
}
if res.is_ok() || !any_attempts {
- return res.map_err(From::from)
+ return res.map_err(From::from);
}
// In the case of an authentication failure (where we tried something) then
// tried.
let res = res.map_err(CargoError::from).chain_err(|| {
let mut msg = "failed to authenticate when downloading \
- repository".to_string();
+ repository"
+ .to_string();
if !ssh_agent_attempts.is_empty() {
- let names = ssh_agent_attempts.iter()
- .map(|s| format!("`{}`", s))
- .collect::<Vec<_>>()
- .join(", ");
- msg.push_str(&format!("\nattempted ssh-agent authentication, but \
- none of the usernames {} succeeded", names));
+ let names = ssh_agent_attempts
+ .iter()
+ .map(|s| format!("`{}`", s))
+ .collect::<Vec<_>>()
+ .join(", ");
+ msg.push_str(&format!(
+ "\nattempted ssh-agent authentication, but \
+ none of the usernames {} succeeded",
+ names
+ ));
}
if let Some(failed_cred_helper) = cred_helper_bad {
if failed_cred_helper {
- msg.push_str("\nattempted to find username/password via \
- git's `credential.helper` support, but failed");
+ msg.push_str(
+ "\nattempted to find username/password via \
+ git's `credential.helper` support, but failed",
+ );
} else {
- msg.push_str("\nattempted to find username/password via \
- `credential.helper`, but maybe the found \
- credentials were incorrect");
+ msg.push_str(
+ "\nattempted to find username/password via \
+ `credential.helper`, but maybe the found \
+ credentials were incorrect",
+ );
}
}
msg
Ok(res)
}
-fn reset(repo: &git2::Repository,
- obj: &git2::Object,
- config: &Config) -> CargoResult<()> {
+fn reset(repo: &git2::Repository, obj: &git2::Object, config: &Config) -> CargoResult<()> {
let mut pb = Progress::new("Checkout", config);
let mut opts = git2::build::CheckoutBuilder::new();
opts.progress(|_, cur, max| {
Ok(())
}
-pub fn with_fetch_options(git_config: &git2::Config,
- url: &Url,
- config: &Config,
- cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>)
- -> CargoResult<()>
-{
+pub fn with_fetch_options(
+ git_config: &git2::Config,
+ url: &Url,
+ config: &Config,
+ cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>,
+) -> CargoResult<()> {
let mut progress = Progress::new("Fetch", config);
network::with_retry(config, || {
with_authentication(url.as_str(), git_config, |f| {
rcb.credentials(f);
rcb.transfer_progress(|stats| {
- progress.tick(stats.indexed_objects(), stats.total_objects()).is_ok()
+ progress
+ .tick(stats.indexed_objects(), stats.total_objects())
+ .is_ok()
});
// Create a local anonymous remote in the repository to fetch the
})
}
-pub fn fetch(repo: &mut git2::Repository,
- url: &Url,
- refspec: &str,
- config: &Config) -> CargoResult<()> {
+pub fn fetch(
+ repo: &mut git2::Repository,
+ url: &Url,
+ refspec: &str,
+ config: &Config,
+) -> CargoResult<()> {
if config.frozen() {
- bail!("attempting to update a git repository, but --frozen \
- was specified")
+ bail!(
+ "attempting to update a git repository, but --frozen \
+ was specified"
+ )
}
if !config.network_allowed() {
bail!("can't update a git repository in the offline mode")
let mut handle = config.http()?.borrow_mut();
debug!("attempting github fast path for {}", url);
if github_up_to_date(&mut handle, url, &oid) {
- return Ok(())
+ return Ok(());
} else {
debug!("fast path failed, falling back to a git fetch");
}
if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
repo_reinitialized = true;
- debug!("looks like this is a corrupt repository, reinitializing \
- and trying again");
+ debug!(
+ "looks like this is a corrupt repository, reinitializing \
+ and trying again"
+ );
if reinitialize(repo).is_ok() {
- continue
+ continue;
}
}
- return Err(err.into())
+ return Err(err.into());
}
Ok(())
})
Ok(e) => e.count(),
Err(_) => {
debug!("skipping gc as pack dir appears gone");
- return Ok(())
+ return Ok(());
}
};
- let max = env::var("__CARGO_PACKFILE_LIMIT").ok()
+ let max = env::var("__CARGO_PACKFILE_LIMIT")
+ .ok()
.and_then(|s| s.parse::<usize>().ok())
.unwrap_or(100);
if entries < max {
debug!("skipping gc as there's only {} pack files", entries);
- return Ok(())
+ return Ok(());
}
// First up, try a literal `git gc` by shelling out to git. This is pretty
// likely to fail though as we may not have `git` installed. Note that
// libgit2 doesn't currently implement the gc operation, so there's no
// equivalent there.
- match Command::new("git").arg("gc").current_dir(repo.path()).output() {
+ match Command::new("git")
+ .arg("gc")
+ .current_dir(repo.path())
+ .output()
+ {
Ok(out) => {
- debug!("git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
- out.status,
- String::from_utf8_lossy(&out.stdout),
- String::from_utf8_lossy(&out.stderr));
+ debug!(
+ "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
+ out.status,
+ String::from_utf8_lossy(&out.stdout),
+ String::from_utf8_lossy(&out.stderr)
+ );
if out.status.success() {
let new = git2::Repository::open(repo.path())?;
mem::replace(repo, new);
- return Ok(())
+ return Ok(());
}
}
Err(e) => debug!("git-gc failed to spawn: {}", e),
for entry in path.read_dir()? {
let entry = entry?;
if entry.file_name().to_str() == Some("tmp") {
- continue
+ continue;
}
let path = entry.path();
drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
let username = try!(pieces.next());
let repo = try!(pieces.next());
if pieces.next().is_some() {
- return false
+ return false;
}
- let url = format!("https://api.github.com/repos/{}/{}/commits/master",
- username, repo);
+ let url = format!(
+ "https://api.github.com/repos/{}/{}/commits/master",
+ username, repo
+ );
try!(handle.get(true).ok());
try!(handle.url(&url).ok());
try!(handle.useragent("cargo").ok());
use ignore::Match;
use ignore::gitignore::GitignoreBuilder;
-use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
+use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use ops;
-use util::{self, CargoResult, internal};
+use util::{self, internal, CargoResult};
use util::Config;
pub struct PathSource<'cfg> {
///
/// This source will only return the package at precisely the `path`
/// specified, and it will be an error if there's not a package at `path`.
- pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
- -> PathSource<'cfg> {
+ pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
source_id: id.clone(),
path: path.to_path_buf(),
///
/// Note that this should be used with care and likely shouldn't be chosen
/// by default!
- pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config)
- -> PathSource<'cfg> {
+ pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
recursive: true,
- .. PathSource::new(root, id, config)
+ ..PathSource::new(root, id, config)
}
}
match self.packages.iter().find(|p| p.root() == &*self.path) {
Some(pkg) => Ok(pkg.clone()),
- None => Err(internal("no package found in source"))
+ None => Err(internal("no package found in source")),
}
}
} else {
p
};
- Pattern::new(pattern).map_err(|e| {
- format_err!("could not parse glob pattern `{}`: {}", p, e)
- })
+ Pattern::new(pattern)
+ .map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e))
};
let glob_exclude = pkg.manifest()
let glob_should_package = |relative_path: &Path| -> bool {
fn glob_match(patterns: &Vec<Pattern>, relative_path: &Path) -> bool {
- patterns.iter().any(|pattern| pattern.matches_path(relative_path))
+ patterns
+ .iter()
+ .any(|pattern| pattern.matches_path(relative_path))
}
// include and exclude options are mutually exclusive.
let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
// include and exclude options are mutually exclusive.
if no_include_option {
- match ignore_exclude.matched_path_or_any_parents(
- relative_path,
- /* is_dir */ false,
- ) {
+ match ignore_exclude
+ .matched_path_or_any_parents(relative_path, /* is_dir */ false)
+ {
Match::None => Ok(true),
Match::Ignore(_) => Ok(false),
Match::Whitelist(pattern) => Err(format_err!(
)),
}
} else {
- match ignore_include.matched_path_or_any_parents(
- relative_path,
- /* is_dir */ false,
- ) {
+ match ignore_include
+ .matched_path_or_any_parents(relative_path, /* is_dir */ false)
+ {
Match::None => Ok(false),
Match::Ignore(_) => Ok(true),
Match::Whitelist(pattern) => Err(format_err!(
if glob_should_package != ignore_should_package {
if glob_should_package {
if no_include_option {
- self.config
- .shell()
- .warn(format!(
- "Pattern matching for Cargo's include/exclude fields is changing and \
- file `{}` WILL be excluded in a future Cargo version.\n\
- See https://github.com/rust-lang/cargo/issues/4268 for more info",
- relative_path.display()
- ))?;
- } else {
- self.config
- .shell()
- .warn(format!(
- "Pattern matching for Cargo's include/exclude fields is changing and \
- file `{}` WILL NOT be included in a future Cargo version.\n\
- See https://github.com/rust-lang/cargo/issues/4268 for more info",
- relative_path.display()
- ))?;
- }
- } else if no_include_option {
- self.config
- .shell()
- .warn(format!(
+ self.config.shell().warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
- file `{}` WILL NOT be excluded in a future Cargo version.\n\
- See https://github.com/rust-lang/cargo/issues/4268 for more info",
+ file `{}` WILL be excluded in a future Cargo version.\n\
+ See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
- } else {
- self.config
- .shell()
- .warn(format!(
+ } else {
+ self.config.shell().warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
- file `{}` WILL be included in a future Cargo version.\n\
- See https://github.com/rust-lang/cargo/issues/4268 for more info",
+ file `{}` WILL NOT be included in a future Cargo version.\n\
+ See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
+ }
+ } else if no_include_option {
+ self.config.shell().warn(format!(
+ "Pattern matching for Cargo's include/exclude fields is changing and \
+ file `{}` WILL NOT be excluded in a future Cargo version.\n\
+ See https://github.com/rust-lang/cargo/issues/4268 for more info",
+ relative_path.display()
+ ))?;
+ } else {
+ self.config.shell().warn(format!(
+ "Pattern matching for Cargo's include/exclude fields is changing and \
+ file `{}` WILL be included in a future Cargo version.\n\
+ See https://github.com/rust-lang/cargo/issues/4268 for more info",
+ relative_path.display()
+ ))?;
}
}
// Returns Some(_) if found sibling Cargo.toml and .git folder;
// otherwise caller should fall back on full file list.
- fn discover_git_and_list_files(&self,
- pkg: &Package,
- root: &Path,
- filter: &mut FnMut(&Path) -> CargoResult<bool>)
- -> Option<CargoResult<Vec<PathBuf>>> {
+ fn discover_git_and_list_files(
+ &self,
+ pkg: &Package,
+ root: &Path,
+ filter: &mut FnMut(&Path) -> CargoResult<bool>,
+ ) -> Option<CargoResult<Vec<PathBuf>>> {
// If this package is in a git repository, then we really do want to
// query the git repository as it takes into account items such as
// .gitignore. We're not quite sure where the git repository is,
Ok(index) => index,
Err(err) => return Some(Err(err.into())),
};
- let path = util::without_prefix(root, cur)
- .unwrap().join("Cargo.toml");
+ let path = util::without_prefix(root, cur).unwrap().join("Cargo.toml");
if index.get_path(&path, 0).is_some() {
return Some(self.list_files_git(pkg, repo, filter));
}
}
// don't cross submodule boundaries
if cur.join(".git").is_dir() {
- break
+ break;
}
match cur.parent() {
Some(parent) => cur = parent,
None
}
- fn list_files_git(&self, pkg: &Package, repo: git2::Repository,
- filter: &mut FnMut(&Path) -> CargoResult<bool>)
- -> CargoResult<Vec<PathBuf>> {
+ fn list_files_git(
+ &self,
+ pkg: &Package,
+ repo: git2::Repository,
+ filter: &mut FnMut(&Path) -> CargoResult<bool>,
+ ) -> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
let index = repo.index()?;
- let root = repo.workdir().ok_or_else(|| {
- internal("Can't list files on a bare repository.")
- })?;
+ let root = repo.workdir()
+ .ok_or_else(|| internal("Can't list files on a bare repository."))?;
let pkg_path = pkg.root();
let mut ret = Vec::<PathBuf>::new();
opts.pathspec(suffix);
}
let statuses = repo.statuses(Some(&mut opts))?;
- let untracked = statuses.iter().filter_map(|entry| {
- match entry.status() {
- git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
- _ => None,
- }
+ let untracked = statuses.iter().filter_map(|entry| match entry.status() {
+ git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
+ _ => None,
});
let mut subpackages_found = Vec::new();
        // bit above via the `pathspec` function call, but we need to filter
// the entries in the index as well.
if !file_path.starts_with(pkg_path) {
- continue
+ continue;
}
match file_path.file_name().and_then(|s| s.to_str()) {
// Filter out Cargo.lock and target always, we don't want to
// package a lock file no one will ever read and we also avoid
// build artifacts
- Some("Cargo.lock") |
- Some("target") => continue,
+ Some("Cargo.lock") | Some("target") => continue,
// Keep track of all sub-packages found and also strip out all
// matches we've found so far. Note, though, that if we find
warn!("subpackage found: {}", path.display());
ret.retain(|p| !p.starts_with(path));
subpackages_found.push(path.to_path_buf());
- continue
+ continue;
}
}
// If this file is part of any other sub-package we've found so far,
// skip it.
if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
- continue
+ continue;
}
if is_dir.unwrap_or_else(|| file_path.is_dir()) {
warn!(" found submodule {}", file_path.display());
let rel = util::without_prefix(&file_path, root).unwrap();
- let rel = rel.to_str().ok_or_else(|| {
- format_err!("invalid utf-8 filename: {}", rel.display())
- })?;
+ let rel = rel.to_str()
+ .ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?;
// Git submodules are currently only named through `/` path
// separators, explicitly not `\` which windows uses. Who knew?
let rel = rel.replace(r"\", "/");
use std::str;
match str::from_utf8(data) {
Ok(s) => Ok(path.join(s)),
- Err(..) => Err(internal("cannot process path in git with a non \
- unicode filename")),
+ Err(..) => Err(internal(
+ "cannot process path in git with a non \
+ unicode filename",
+ )),
}
}
}
- fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> CargoResult<bool>)
- -> CargoResult<Vec<PathBuf>> {
+ fn list_files_walk(
+ &self,
+ pkg: &Package,
+ filter: &mut FnMut(&Path) -> CargoResult<bool>,
+ ) -> CargoResult<Vec<PathBuf>> {
let mut ret = Vec::new();
PathSource::walk(pkg.root(), &mut ret, true, filter)?;
Ok(ret)
}
- fn walk(path: &Path, ret: &mut Vec<PathBuf>,
- is_root: bool, filter: &mut FnMut(&Path) -> CargoResult<bool>)
- -> CargoResult<()>
- {
+ fn walk(
+ path: &Path,
+ ret: &mut Vec<PathBuf>,
+ is_root: bool,
+ filter: &mut FnMut(&Path) -> CargoResult<bool>,
+ ) -> CargoResult<()> {
if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
if (*filter)(path)? {
ret.push(path.to_path_buf());
}
- return Ok(())
+ return Ok(());
}
// Don't recurse into any sub-packages that we have
if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
- return Ok(())
+ return Ok(());
}
// For package integration tests, we need to sort the paths in a deterministic order to
let name = path.file_name().and_then(|s| s.to_str());
// Skip dotfile directories
if name.map(|s| s.starts_with('.')) == Some(true) {
- continue
+ continue;
}
if is_root {
// Skip cargo artifacts
}
impl<'cfg> Registry for PathSource<'cfg> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
for s in self.packages.iter().map(|p| p.summary()) {
if dep.matches(s) {
f(s.clone())
trace!("getting packages; id={}", id);
let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
- pkg.cloned().ok_or_else(|| {
- internal(format!("failed to find {} in path source", id))
- })
+ pkg.cloned()
+ .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
// condition where this path was rm'ed - either way,
// we can ignore the error and treat the path's mtime
// as 0.
- let mtime = fs::metadata(&file).map(|meta| {
- FileTime::from_last_modification_time(&meta)
- }).unwrap_or(FileTime::zero());
+ let mtime = fs::metadata(&file)
+ .map(|meta| FileTime::from_last_modification_time(&meta))
+ .unwrap_or(FileTime::zero());
warn!("{} {}", mtime, file.display());
if mtime > max {
max = mtime;
use semver::Version;
use core::dependency::Dependency;
-use core::{SourceId, Summary, PackageId};
+use core::{PackageId, SourceId, Summary};
use sources::registry::{RegistryPackage, INDEX_LOCK};
use sources::registry::RegistryData;
-use util::{CargoResult, internal, Filesystem, Config};
+use util::{internal, CargoResult, Config, Filesystem};
pub struct RegistryIndex<'cfg> {
source_id: SourceId,
}
impl<'cfg> RegistryIndex<'cfg> {
- pub fn new(id: &SourceId,
- path: &Filesystem,
- config: &'cfg Config,
- locked: bool)
- -> RegistryIndex<'cfg> {
+ pub fn new(
+ id: &SourceId,
+ path: &Filesystem,
+ config: &'cfg Config,
+ locked: bool,
+ ) -> RegistryIndex<'cfg> {
RegistryIndex {
source_id: id.clone(),
path: path.clone(),
}
/// Return the hash listed for a specified PackageId.
- pub fn hash(&mut self,
- pkg: &PackageId,
- load: &mut RegistryData)
- -> CargoResult<String> {
+ pub fn hash(&mut self, pkg: &PackageId, load: &mut RegistryData) -> CargoResult<String> {
let name = &*pkg.name();
let version = pkg.version();
if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) {
- return Ok(s.clone())
+ return Ok(s.clone());
}
// Ok, we're missing the key, so parse the index file to load it.
self.summaries(name, load)?;
- self.hashes.get(name).and_then(|v| v.get(version)).ok_or_else(|| {
- internal(format!("no hash listed for {}", pkg))
- }).map(|s| s.clone())
+ self.hashes
+ .get(name)
+ .and_then(|v| v.get(version))
+ .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))
+ .map(|s| s.clone())
}
/// Parse the on-disk metadata for the package provided
///
/// Returns a list of pairs of (summary, yanked) for the package name
/// specified.
- pub fn summaries(&mut self,
- name: &str,
- load: &mut RegistryData)
- -> CargoResult<&Vec<(Summary, bool)>> {
+ pub fn summaries(
+ &mut self,
+ name: &str,
+ load: &mut RegistryData,
+ ) -> CargoResult<&Vec<(Summary, bool)>> {
if self.cache.contains_key(name) {
return Ok(&self.cache[name]);
}
Ok(&self.cache[name])
}
- fn load_summaries(&mut self,
- name: &str,
- load: &mut RegistryData)
- -> CargoResult<Vec<(Summary, bool)>> {
+ fn load_summaries(
+ &mut self,
+ name: &str,
+ load: &mut RegistryData,
+ ) -> CargoResult<Vec<(Summary, bool)>> {
let (root, _lock) = if self.locked {
- let lock = self.path.open_ro(Path::new(INDEX_LOCK),
- self.config,
- "the registry index");
+ let lock = self.path
+ .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index");
match lock {
- Ok(lock) => {
- (lock.path().parent().unwrap().to_path_buf(), Some(lock))
- }
+ Ok(lock) => (lock.path().parent().unwrap().to_path_buf(), Some(lock)),
Err(_) => return Ok(Vec::new()),
}
} else {
(self.path.clone().into_path_unlocked(), None)
};
- let fs_name = name.chars().flat_map(|c| {
- c.to_lowercase()
- }).collect::<String>();
+ let fs_name = name.chars()
+ .flat_map(|c| c.to_lowercase())
+ .collect::<String>();
// see module comment for why this is structured the way it is
let path = match fs_name.len() {
let mut hit_closure = false;
let err = load.load(&root, Path::new(&path), &mut |contents| {
hit_closure = true;
- let contents = str::from_utf8(contents).map_err(|_| {
- format_err!("registry index file was not valid utf-8")
- })?;
+ let contents = str::from_utf8(contents)
+ .map_err(|_| format_err!("registry index file was not valid utf-8"))?;
ret.reserve(contents.lines().count());
- let lines = contents.lines()
- .map(|s| s.trim())
- .filter(|l| !l.is_empty());
+ let lines = contents.lines().map(|s| s.trim()).filter(|l| !l.is_empty());
let online = !self.config.cli_unstable().offline;
// Attempt forwards-compatibility on the index by ignoring
// interpretation of each line here and older cargo will simply
// ignore the new lines.
ret.extend(lines.filter_map(|line| {
- self.parse_registry_package(line).ok().and_then(|v|{
+ self.parse_registry_package(line).ok().and_then(|v| {
if online || load.is_crate_downloaded(v.0.package_id()) {
Some(v)
} else {
/// package.
///
/// The returned boolean is whether or not the summary has been yanked.
- fn parse_registry_package(&mut self, line: &str)
- -> CargoResult<(Summary, bool)> {
+ fn parse_registry_package(&mut self, line: &str) -> CargoResult<(Summary, bool)> {
let RegistryPackage {
- name, vers, cksum, deps, features, yanked, links
+ name,
+ vers,
+ cksum,
+ deps,
+ features,
+ yanked,
+ links,
} = super::DEFAULT_ID.set(&self.source_id, || {
serde_json::from_str::<RegistryPackage>(line)
})?;
if self.hashes.contains_key(&name[..]) {
self.hashes.get_mut(&name[..]).unwrap().insert(vers, cksum);
} else {
- self.hashes.entry(name.into_owned())
+ self.hashes
+ .entry(name.into_owned())
.or_insert_with(HashMap::new)
.insert(vers, cksum);
}
Ok((summary, yanked.unwrap_or(false)))
}
- pub fn query(&mut self,
- dep: &Dependency,
- load: &mut RegistryData,
- f: &mut FnMut(Summary))
- -> CargoResult<()> {
+ pub fn query(
+ &mut self,
+ dep: &Dependency,
+ load: &mut RegistryData,
+ f: &mut FnMut(Summary),
+ ) -> CargoResult<()> {
let source_id = self.source_id.clone();
let summaries = self.summaries(&*dep.name(), load)?;
- let summaries = summaries.iter().filter(|&&(_, yanked)| {
- dep.source_id().precise().is_some() || !yanked
- }).map(|s| s.0.clone());
+ let summaries = summaries
+ .iter()
+ .filter(|&&(_, yanked)| dep.source_id().precise().is_some() || !yanked)
+ .map(|s| s.0.clone());
// Handle `cargo update --precise` here. If specified, our own source
// will have a precise version listed of the form `<pkg>=<req>` where
// `<pkg>` is the name of a crate on this source and `<req>` is the
// version requested (argument to `--precise`).
- let summaries = summaries.filter(|s| {
- match source_id.precise() {
- Some(p) if p.starts_with(&*dep.name()) &&
- p[dep.name().len()..].starts_with('=') => {
- let vers = &p[dep.name().len() + 1..];
- s.version().to_string() == vers
- }
- _ => true,
+ let summaries = summaries.filter(|s| match source_id.precise() {
+ Some(p) if p.starts_with(&*dep.name()) && p[dep.name().len()..].starts_with('=') => {
+ let vers = &p[dep.name().len() + 1..];
+ s.version().to_string() == vers
}
+ _ => true,
});
for summary in summaries {
use core::PackageId;
use hex;
-use sources::registry::{RegistryData, RegistryConfig};
+use sources::registry::{RegistryConfig, RegistryData};
use util::FileLock;
use util::paths;
-use util::{Config, Sha256, Filesystem};
+use util::{Config, Filesystem, Sha256};
use util::errors::{CargoResult, CargoResultExt};
pub struct LocalRegistry<'cfg> {
}
impl<'cfg> LocalRegistry<'cfg> {
- pub fn new(root: &Path,
- config: &'cfg Config,
- name: &str) -> LocalRegistry<'cfg> {
+ pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
LocalRegistry {
src_path: config.registry_source_path().join(name),
index_path: Filesystem::new(root.join("index")),
&self.index_path
}
- fn load(&self,
- root: &Path,
- path: &Path,
- data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> {
+ fn load(
+ &self,
+ root: &Path,
+ path: &Path,
+ data: &mut FnMut(&[u8]) -> CargoResult<()>,
+ ) -> CargoResult<()> {
data(&paths::read_bytes(&root.join(path))?)
}
// these directories exist.
let root = self.root.clone().into_path_unlocked();
if !root.is_dir() {
- bail!("local registry path is not a directory: {}",
- root.display())
+ bail!("local registry path is not a directory: {}", root.display())
}
let index_path = self.index_path.clone().into_path_unlocked();
if !index_path.is_dir() {
- bail!("local registry index path is not a directory: {}",
- index_path.display())
+ bail!(
+ "local registry index path is not a directory: {}",
+ index_path.display()
+ )
}
Ok(())
}
- fn download(&mut self, pkg: &PackageId, checksum: &str)
- -> CargoResult<FileLock> {
+ fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
- let mut crate_file = self.root.open_ro(&crate_file,
- self.config,
- "crate file")?;
+ let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
// If we've already got an unpacked version of this crate, then skip the
// checksum below as it is in theory already verified.
let dst = format!("{}-{}", pkg.name(), pkg.version());
if self.src_path.join(dst).into_path_unlocked().exists() {
- return Ok(crate_file)
+ return Ok(crate_file);
}
self.config.shell().status("Unpacking", pkg)?;
let mut state = Sha256::new();
let mut buf = [0; 64 * 1024];
loop {
- let n = crate_file.read(&mut buf).chain_err(|| {
- format!("failed to read `{}`", crate_file.path().display())
- })?;
+ let n = crate_file
+ .read(&mut buf)
+ .chain_err(|| format!("failed to read `{}`", crate_file.path().display()))?;
if n == 0 {
- break
+ break;
}
state.update(&buf[..n]);
}
use std::collections::BTreeMap;
use std::fmt;
use std::fs::File;
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
use flate2::read::GzDecoder;
use semver::Version;
use serde::de;
use tar::Archive;
-use core::{Source, SourceId, PackageId, Package, Summary, Registry};
+use core::{Package, PackageId, Registry, Source, SourceId, Summary};
use core::dependency::{Dependency, Kind};
use sources::PathSource;
-use util::{CargoResult, Config, internal, FileLock, Filesystem};
+use util::{internal, CargoResult, Config, FileLock, Filesystem};
use util::errors::CargoResultExt;
use util::hex;
use util::to_url::ToUrl;
features: BTreeMap<String, Vec<String>>,
cksum: String,
yanked: Option<bool>,
- #[serde(default)]
- links: Option<String>,
+ #[serde(default)] links: Option<String>,
}
struct DependencyList {
pub trait RegistryData {
fn index_path(&self) -> &Filesystem;
- fn load(&self,
- _root: &Path,
- path: &Path,
- data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()>;
+ fn load(
+ &self,
+ _root: &Path,
+ path: &Path,
+ data: &mut FnMut(&[u8]) -> CargoResult<()>,
+ ) -> CargoResult<()>;
fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
fn update_index(&mut self) -> CargoResult<()>;
- fn download(&mut self,
- pkg: &PackageId,
- checksum: &str) -> CargoResult<FileLock>;
+ fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock>;
- fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool { true }
+ fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
+ true
+ }
}
mod index;
}
impl<'cfg> RegistrySource<'cfg> {
- pub fn remote(source_id: &SourceId,
- config: &'cfg Config) -> RegistrySource<'cfg> {
+ pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id);
let ops = remote::RemoteRegistry::new(source_id, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), true)
}
- pub fn local(source_id: &SourceId,
- path: &Path,
- config: &'cfg Config) -> RegistrySource<'cfg> {
+ pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id);
let ops = local::LocalRegistry::new(path, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), false)
}
- fn new(source_id: &SourceId,
- config: &'cfg Config,
- name: &str,
- ops: Box<RegistryData + 'cfg>,
- index_locked: bool) -> RegistrySource<'cfg> {
+ fn new(
+ source_id: &SourceId,
+ config: &'cfg Config,
+ name: &str,
+ ops: Box<RegistryData + 'cfg>,
+ index_locked: bool,
+ ) -> RegistrySource<'cfg> {
RegistrySource {
src_path: config.registry_source_path().join(name),
config,
source_id: source_id.clone(),
updated: false,
- index: index::RegistryIndex::new(source_id,
- ops.index_path(),
- config,
- index_locked),
+ index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked),
index_locked,
ops,
}
/// compiled.
///
/// No action is taken if the source looks like it's already unpacked.
- fn unpack_package(&self,
- pkg: &PackageId,
- tarball: &FileLock)
- -> CargoResult<PathBuf> {
- let dst = self.src_path.join(&format!("{}-{}", pkg.name(),
- pkg.version()));
+ fn unpack_package(&self, pkg: &PackageId, tarball: &FileLock) -> CargoResult<PathBuf> {
+ let dst = self.src_path
+ .join(&format!("{}-{}", pkg.name(), pkg.version()));
dst.create_dir()?;
// Note that we've already got the `tarball` locked above, and that
// implies a lock on the unpacked destination as well, so this access
let dst = dst.into_path_unlocked();
let ok = dst.join(".cargo-ok");
if ok.exists() {
- return Ok(dst)
+ return Ok(dst);
}
let gz = GzDecoder::new(tarball.file());
let parent = dst.parent().unwrap();
for entry in tar.entries()? {
let mut entry = entry.chain_err(|| "failed to iterate over archive")?;
- let entry_path = entry.path()
+ let entry_path = entry
+ .path()
.chain_err(|| "failed to read entry path")?
.into_owned();
// crates.io should also block uploads with these sorts of tarballs,
// but be extra sure by adding a check here as well.
if !entry_path.starts_with(prefix) {
- bail!("invalid tarball downloaded, contains \
- a file at {:?} which isn't under {:?}",
- entry_path, prefix)
+ bail!(
+ "invalid tarball downloaded, contains \
+ a file at {:?} which isn't under {:?}",
+ entry_path,
+ prefix
+ )
}
// Once that's verified, unpack the entry as usual.
- entry.unpack_in(parent).chain_err(|| {
- format!("failed to unpack entry at `{}`", entry_path.display())
- })?;
+ entry
+ .unpack_in(parent)
+ .chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?;
}
File::create(&ok)?;
Ok(dst.clone())
fn do_update(&mut self) -> CargoResult<()> {
self.ops.update_index()?;
let path = self.ops.index_path();
- self.index = index::RegistryIndex::new(&self.source_id,
- path,
- self.config,
- self.index_locked);
+ self.index =
+ index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
Ok(())
}
}
impl<'cfg> Registry for RegistrySource<'cfg> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
// If this is a precise dependency, then it came from a lockfile and in
// theory the registry is known to contain this version. If, however, we
// come back with no summaries, then our registry may need to be
f(s);
})?;
if called {
- return Ok(())
+ return Ok(());
} else {
self.do_update()?;
}
fn download(&mut self, package: &PackageId) -> CargoResult<Package> {
let hash = self.index.hash(package, &mut *self.ops)?;
let path = self.ops.download(package, &hash)?;
- let path = self.unpack_package(package, &path).chain_err(|| {
- internal(format!("failed to unpack package `{}`", package))
- })?;
+ let path = self.unpack_package(package, &path)
+ .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.download(package)?;
// *summary* loaded from the Cargo.toml we just downloaded with the one
// we loaded from the index.
let summaries = self.index.summaries(&*package.name(), &mut *self.ops)?;
- let summary = summaries.iter().map(|s| &s.0).find(|s| {
- s.package_id() == package
- }).expect("summary not found");
+ let summary = summaries
+ .iter()
+ .map(|s| &s.0)
+ .find(|s| s.package_id() == package)
+ .expect("summary not found");
let mut manifest = pkg.manifest().clone();
manifest.set_summary(summary.clone());
Ok(Package::new(manifest, pkg.manifest_path()))
impl<'de> de::Deserialize<'de> for DependencyList {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: de::Deserializer<'de>,
+ where
+ D: de::Deserializer<'de>,
{
return deserializer.deserialize_seq(Visitor);
}
fn visit_seq<A>(self, mut seq: A) -> Result<DependencyList, A::Error>
- where A: de::SeqAccess<'de>,
+ where
+ A: de::SeqAccess<'de>,
{
let mut ret = Vec::new();
if let Some(size) = seq.size_hint() {
ret.reserve(size);
}
while let Some(element) = seq.next_element::<RegistryDependency>()? {
- ret.push(parse_registry_dependency(element).map_err(|e| {
- de::Error::custom(e)
- })?);
+ ret.push(parse_registry_dependency(element).map_err(|e| de::Error::custom(e))?);
}
Ok(DependencyList { inner: ret })
}
/// Converts an encoded dependency in the registry to a cargo dependency
-fn parse_registry_dependency(dep: RegistryDependency)
- -> CargoResult<Dependency> {
+fn parse_registry_dependency(dep: RegistryDependency) -> CargoResult<Dependency> {
let RegistryDependency {
- name, req, mut features, optional, default_features, target, kind, registry
+ name,
+ req,
+ mut features,
+ optional,
+ default_features,
+ target,
+ kind,
+ registry,
} = dep;
let id = if let Some(registry) = registry {
SourceId::for_registry(®istry.to_url()?)?
} else {
- DEFAULT_ID.with(|id| {
- id.clone()
- })
+ DEFAULT_ID.with(|id| id.clone())
};
let mut dep = Dependency::parse_no_deprecated(&name, Some(&req), &id)?;
features.retain(|s| !s.is_empty());
dep.set_optional(optional)
- .set_default_features(default_features)
- .set_features(features)
- .set_platform(platform)
- .set_kind(kind);
+ .set_default_features(default_features)
+ .set_features(features)
+ .set_platform(platform)
+ .set_kind(kind);
Ok(dep)
}
-use std::cell::{RefCell, Ref, Cell};
+use std::cell::{Cell, Ref, RefCell};
use std::fmt::Write as FmtWrite;
use std::io::SeekFrom;
use std::io::prelude::*;
use core::{PackageId, SourceId};
use sources::git;
-use sources::registry::{RegistryData, RegistryConfig, INDEX_LOCK, CRATE_TEMPLATE, VERSION_TEMPLATE};
+use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
use util::network;
use util::{FileLock, Filesystem};
-use util::{Config, Sha256, ToUrl, Progress};
+use util::{Config, Progress, Sha256, ToUrl};
use util::errors::{CargoResult, CargoResultExt, HttpNot200};
pub struct RemoteRegistry<'cfg> {
}
impl<'cfg> RemoteRegistry<'cfg> {
- pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str)
- -> RemoteRegistry<'cfg> {
+ pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
RemoteRegistry {
index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name),
// Fast path without a lock
if let Ok(repo) = git2::Repository::open(&path) {
- return Ok(repo)
+ return Ok(repo);
}
// Ok, now we need to lock and try the whole thing over again.
- let lock = self.index_path.open_rw(Path::new(INDEX_LOCK),
- self.config,
- "the registry index")?;
+ let lock =
+ self.index_path
+ .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
match git2::Repository::open(&path) {
Ok(repo) => Ok(repo),
Err(_) => {
{
let tree = self.tree.borrow();
if tree.is_some() {
- return Ok(Ref::map(tree, |s| s.as_ref().unwrap()))
+ return Ok(Ref::map(tree, |s| s.as_ref().unwrap()));
}
}
let repo = self.repo()?;
// (`RemoteRegistry`) so we then just need to ensure that the tree is
// destroyed first in the destructor, hence the destructor on
// `RemoteRegistry` below.
- let tree = unsafe {
- mem::transmute::<git2::Tree, git2::Tree<'static>>(tree)
- };
+ let tree = unsafe { mem::transmute::<git2::Tree, git2::Tree<'static>>(tree) };
*self.tree.borrow_mut() = Some(tree);
Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
}
&self.index_path
}
- fn load(&self,
- _root: &Path,
- path: &Path,
- data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> {
+ fn load(
+ &self,
+ _root: &Path,
+ path: &Path,
+ data: &mut FnMut(&[u8]) -> CargoResult<()>,
+ ) -> CargoResult<()> {
// Note that the index calls this method and the filesystem is locked
// in the index, so we don't need to worry about an `update_index`
// happening in a different process.
fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
self.repo()?; // create intermediate dirs and initialize the repo
- let _lock = self.index_path.open_ro(Path::new(INDEX_LOCK),
- self.config,
- "the registry index")?;
+ let _lock =
+ self.index_path
+ .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index")?;
let mut config = None;
self.load(Path::new(""), Path::new("config.json"), &mut |json| {
config = Some(serde_json::from_slice(json)?);
self.repo()?;
self.head.set(None);
*self.tree.borrow_mut() = None;
- let _lock = self.index_path.open_rw(Path::new(INDEX_LOCK),
- self.config,
- "the registry index")?;
- self.config.shell().status("Updating", self.source_id.display_registry())?;
+ let _lock =
+ self.index_path
+ .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
+ self.config
+ .shell()
+ .status("Updating", self.source_id.display_registry())?;
// git fetch origin master
let url = self.source_id.url();
let refspec = "refs/heads/master:refs/remotes/origin/master";
let repo = self.repo.borrow_mut().unwrap();
- git::fetch(repo, url, refspec, self.config).chain_err(|| {
- format!("failed to fetch `{}`", url)
- })?;
+ git::fetch(repo, url, refspec, self.config)
+ .chain_err(|| format!("failed to fetch `{}`", url))?;
Ok(())
}
- fn download(&mut self, pkg: &PackageId, checksum: &str)
- -> CargoResult<FileLock> {
+ fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename);
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
let meta = dst.file().metadata()?;
if meta.len() > 0 {
- return Ok(dst)
+ return Ok(dst);
}
}
let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
let meta = dst.file().metadata()?;
if meta.len() > 0 {
- return Ok(dst)
+ return Ok(dst);
}
self.config.shell().status("Downloading", pkg)?;
if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) {
write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
}
- let url = url
- .replace(CRATE_TEMPLATE, &*pkg.name())
+ let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(VERSION_TEMPLATE, &pkg.version().to_string())
.to_url()?;
let code = handle.response_code()?;
if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&url);
- Err(HttpNot200 { code, url: url.to_string() }.into())
+ Err(HttpNot200 {
+ code,
+ url: url.to_string(),
+ }.into())
} else {
Ok(())
}
Ok(dst)
}
-
fn is_crate_downloaded(&self, pkg: &PackageId) -> bool {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename);
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
- if let Ok(meta) = dst.file().metadata(){
+ if let Ok(meta) = dst.file().metadata() {
return meta.len() > 0;
}
}
false
}
-
}
impl<'cfg> Drop for RemoteRegistry<'cfg> {
-use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId};
+use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> {
}
impl<'cfg> ReplacedSource<'cfg> {
- pub fn new(to_replace: &SourceId,
- replace_with: &SourceId,
- src: Box<Source + 'cfg>) -> ReplacedSource<'cfg> {
+ pub fn new(
+ to_replace: &SourceId,
+ replace_with: &SourceId,
+ src: Box<Source + 'cfg>,
+ ) -> ReplacedSource<'cfg> {
ReplacedSource {
to_replace: to_replace.clone(),
replace_with: replace_with.clone(),
}
impl<'cfg> Registry for ReplacedSource<'cfg> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
- self.inner.query(&dep, &mut |summary| {
- f(summary.map_source(replace_with, to_replace))
- }).chain_err(|| {
- format!("failed to query replaced source {}",
- self.to_replace)
- })?;
+ self.inner
+ .query(&dep, &mut |summary| {
+ f(summary.map_source(replace_with, to_replace))
+ })
+ .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
Ok(())
}
}
fn update(&mut self) -> CargoResult<()> {
- self.inner.update().chain_err(|| {
- format!("failed to update replaced source {}",
- self.to_replace)
- })?;
+ self.inner
+ .update()
+ .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?;
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
let id = id.with_source_id(&self.replace_with);
- let pkg = self.inner.download(&id).chain_err(|| {
- format!("failed to download replaced source {}",
- self.to_replace)
- })?;
+ let pkg = self.inner
+ .download(&id)
+ .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace))
}
let mut p = Parser::new(s);
let e = p.expr()?;
if p.t.next().is_some() {
- bail!("can only have one cfg-expression, consider using all() or \
- any() explicitly")
+ bail!(
+ "can only have one cfg-expression, consider using all() or \
+ any() explicitly"
+ )
}
Ok(e)
}
fn expr(&mut self) -> CargoResult<CfgExpr> {
match self.t.peek() {
- Some(&Ok(Token::Ident(op @ "all"))) |
- Some(&Ok(Token::Ident(op @ "any"))) => {
+ Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => {
self.t.next();
let mut e = Vec::new();
self.eat(Token::LeftParen)?;
e.push(self.expr()?);
if !self.try(Token::Comma) {
self.eat(Token::RightParen)?;
- break
+ break;
}
}
if op == "all" {
Ok(CfgExpr::Not(Box::new(e)))
}
Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
- Some(&Err(..)) => {
- Err(self.t.next().unwrap().err().unwrap())
- }
- None => bail!("expected start of a cfg expression, \
- found nothing"),
+ Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
+ None => bail!(
+ "expected start of a cfg expression, \
+ found nothing"
+ ),
}
}
let e = if self.try(Token::Equals) {
let val = match self.t.next() {
Some(Ok(Token::String(s))) => s,
- Some(Ok(t)) => bail!("expected a string, found {}",
- t.classify()),
+ Some(Ok(t)) => bail!("expected a string, found {}", t.classify()),
Some(Err(e)) => return Err(e),
None => bail!("expected a string, found nothing"),
};
fn eat(&mut self, token: Token<'a>) -> CargoResult<()> {
match self.t.next() {
Some(Ok(ref t)) if token == *t => Ok(()),
- Some(Ok(t)) => bail!("expected {}, found {}", token.classify(),
- t.classify()),
+ Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()),
Some(Err(e)) => Err(e),
None => bail!("expected {}, but cfg expr ended", token.classify()),
}
Some((start, '"')) => {
while let Some((end, ch)) = self.s.next() {
if ch == '"' {
- return Some(Ok(Token::String(&self.orig[start+1..end])))
+ return Some(Ok(Token::String(&self.orig[start + 1..end])));
}
}
- return Some(Err(format_err!("unterminated string in cfg")))
+ return Some(Err(format_err!("unterminated string in cfg")));
}
Some((start, ch)) if is_ident_start(ch) => {
while let Some(&(end, ch)) = self.s.peek() {
if !is_ident_rest(ch) {
- return Some(Ok(Token::Ident(&self.orig[start..end])))
+ return Some(Ok(Token::Ident(&self.orig[start..end])));
} else {
self.s.next();
}
}
- return Some(Ok(Token::Ident(&self.orig[start..])))
+ return Some(Ok(Token::Ident(&self.orig[start..])));
}
Some((_, ch)) => {
- return Some(Err(format_err!("unexpected character in \
- cfg `{}`, expected parens, \
- a comma, an identifier, or \
- a string", ch)))
+ return Some(Err(format_err!(
+ "unexpected character in \
+ cfg `{}`, expected parens, \
+ a comma, an identifier, or \
+ a string",
+ ch
+ )))
}
- None => return None
+ None => return None,
}
}
}
use lazycell::LazyCell;
use core::shell::Verbosity;
-use core::{Shell, CliUnstable, SourceId};
+use core::{CliUnstable, Shell, SourceId};
use ops;
use url::Url;
use util::ToUrl;
use util::Rustc;
-use util::errors::{CargoResult, CargoResultExt, CargoError, internal};
+use util::errors::{internal, CargoError, CargoResult, CargoResultExt};
use util::paths;
use util::toml as cargo_toml;
use util::Filesystem;
}
impl Config {
- pub fn new(shell: Shell,
- cwd: PathBuf,
- homedir: PathBuf) -> Config {
+ pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config {
static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _;
static INIT: Once = ONCE_INIT;
pub fn default() -> CargoResult<Config> {
let shell = Shell::new();
- let cwd = env::current_dir().chain_err(|| {
- "couldn't get the current directory of the process"
- })?;
+ let cwd =
+ env::current_dir().chain_err(|| "couldn't get the current directory of the process")?;
let homedir = homedir(&cwd).ok_or_else(|| {
- format_err!("Cargo couldn't find your home directory. \
- This probably means that $HOME was not set.")
+ format_err!(
+ "Cargo couldn't find your home directory. \
+ This probably means that $HOME was not set."
+ )
})?;
Ok(Config::new(shell, cwd, homedir))
}
/// The user's cargo home directory (OS-dependent)
- pub fn home(&self) -> &Filesystem { &self.home_path }
+ pub fn home(&self) -> &Filesystem {
+ &self.home_path
+ }
/// The cargo git directory (`<cargo_home>/git`)
pub fn git_path(&self) -> Filesystem {
/// Get the path to the `rustdoc` executable
pub fn rustdoc(&self) -> CargoResult<&Path> {
- self.rustdoc.try_borrow_with(|| self.get_tool("rustdoc")).map(AsRef::as_ref)
+ self.rustdoc
+ .try_borrow_with(|| self.get_tool("rustdoc"))
+ .map(AsRef::as_ref)
}
/// Get the path to the `rustc` executable
pub fn rustc(&self) -> CargoResult<&Rustc> {
- self.rustc.try_borrow_with(|| Rustc::new(self.get_tool("rustc")?,
- self.maybe_get_tool("rustc_wrapper")?))
+ self.rustc.try_borrow_with(|| {
+ Rustc::new(
+ self.get_tool("rustc")?,
+ self.maybe_get_tool("rustc_wrapper")?,
+ )
+ })
}
/// Get the path to the `cargo` executable
pub fn cargo_exe(&self) -> CargoResult<&Path> {
- self.cargo_exe.try_borrow_with(|| {
- fn from_current_exe() -> CargoResult<PathBuf> {
- // Try fetching the path to `cargo` using env::current_exe().
- // The method varies per operating system and might fail; in particular,
- // it depends on /proc being mounted on Linux, and some environments
- // (like containers or chroots) may not have that available.
- let exe = env::current_exe()?.canonicalize()?;
- Ok(exe)
- }
-
- fn from_argv() -> CargoResult<PathBuf> {
- // Grab argv[0] and attempt to resolve it to an absolute path.
- // If argv[0] has one component, it must have come from a PATH lookup,
- // so probe PATH in that case.
- // Otherwise, it has multiple components and is either:
- // - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
- // - an absolute path (e.g. `/usr/local/bin/cargo`).
- // In either case, Path::canonicalize will return the full absolute path
- // to the target if it exists
- let argv0 = env::args_os()
- .map(PathBuf::from)
- .next()
- .ok_or(format_err!("no argv[0]"))?;
- if argv0.components().count() == 1 {
- probe_path(argv0)
- } else {
- Ok(argv0.canonicalize()?)
+ self.cargo_exe
+ .try_borrow_with(|| {
+ fn from_current_exe() -> CargoResult<PathBuf> {
+ // Try fetching the path to `cargo` using env::current_exe().
+ // The method varies per operating system and might fail; in particular,
+ // it depends on /proc being mounted on Linux, and some environments
+ // (like containers or chroots) may not have that available.
+ let exe = env::current_exe()?.canonicalize()?;
+ Ok(exe)
}
- }
- fn probe_path(argv0: PathBuf) -> CargoResult<PathBuf> {
- let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?;
- for path in env::split_paths(&paths) {
- let candidate = PathBuf::from(path).join(&argv0);
- if candidate.is_file() {
- // PATH may have a component like "." in it, so we still need to
- // canonicalize.
- return Ok(candidate.canonicalize()?)
+ fn from_argv() -> CargoResult<PathBuf> {
+ // Grab argv[0] and attempt to resolve it to an absolute path.
+ // If argv[0] has one component, it must have come from a PATH lookup,
+ // so probe PATH in that case.
+ // Otherwise, it has multiple components and is either:
+ // - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
+ // - an absolute path (e.g. `/usr/local/bin/cargo`).
+ // In either case, Path::canonicalize will return the full absolute path
+ // to the target if it exists
+ let argv0 = env::args_os()
+ .map(PathBuf::from)
+ .next()
+ .ok_or(format_err!("no argv[0]"))?;
+ if argv0.components().count() == 1 {
+ probe_path(argv0)
+ } else {
+ Ok(argv0.canonicalize()?)
}
}
- bail!("no cargo executable candidate found in PATH")
- }
+ fn probe_path(argv0: PathBuf) -> CargoResult<PathBuf> {
+ let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?;
+ for path in env::split_paths(&paths) {
+ let candidate = PathBuf::from(path).join(&argv0);
+ if candidate.is_file() {
+ // PATH may have a component like "." in it, so we still need to
+ // canonicalize.
+ return Ok(candidate.canonicalize()?);
+ }
+ }
+
+ bail!("no cargo executable candidate found in PATH")
+ }
- let exe = from_current_exe()
- .or_else(|_| from_argv())
- .chain_err(|| "couldn't get the path to cargo executable")?;
- Ok(exe)
- }).map(AsRef::as_ref)
+ let exe = from_current_exe()
+ .or_else(|_| from_argv())
+ .chain_err(|| "couldn't get the path to cargo executable")?;
+ Ok(exe)
+ })
+ .map(AsRef::as_ref)
}
pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> {
}
}
- pub fn cwd(&self) -> &Path { &self.cwd }
+ pub fn cwd(&self) -> &Path {
+ &self.cwd
+ }
pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> {
if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
None => return Ok(None),
}
}
- CV::Integer(_, ref path) |
- CV::String(_, ref path) |
- CV::List(_, ref path) |
- CV::Boolean(_, ref path) => {
- let idx = key.split('.').take(i)
- .fold(0, |n, s| n + s.len()) + i - 1;
+ CV::Integer(_, ref path)
+ | CV::String(_, ref path)
+ | CV::List(_, ref path)
+ | CV::Boolean(_, ref path) => {
+ let idx = key.split('.').take(i).fold(0, |n, s| n + s.len()) + i - 1;
let key_so_far = &key[..idx];
- bail!("expected table for configuration key `{}`, \
- but found {} in {}",
- key_so_far, val.desc(), path.display())
+ bail!(
+ "expected table for configuration key `{}`, \
+ but found {} in {}",
+ key_so_far,
+ val.desc(),
+ path.display()
+ )
}
}
}
}
fn get_env<V: FromStr>(&self, key: &str) -> CargoResult<Option<Value<V>>>
- where CargoError: From<V::Err>
+ where
+ CargoError: From<V::Err>,
{
let key = key.replace(".", "_")
- .replace("-", "_")
- .chars()
- .flat_map(|c| c.to_uppercase())
- .collect::<String>();
+ .replace("-", "_")
+ .chars()
+ .flat_map(|c| c.to_uppercase())
+ .collect::<String>();
match env::var(&format!("CARGO_{}", key)) {
- Ok(value) => {
- Ok(Some(Value {
- val: value.parse()?,
- definition: Definition::Environment,
- }))
- }
+ Ok(value) => Ok(Some(Value {
+ val: value.parse()?,
+ definition: Definition::Environment,
+ })),
Err(..) => Ok(None),
}
}
pub fn get_string(&self, key: &str) -> CargoResult<Option<Value<String>>> {
if let Some(v) = self.get_env(key)? {
- return Ok(Some(v))
+ return Ok(Some(v));
}
match self.get(key)? {
- Some(CV::String(i, path)) => {
- Ok(Some(Value {
- val: i,
- definition: Definition::Path(path),
- }))
- }
+ Some(CV::String(i, path)) => Ok(Some(Value {
+ val: i,
+ definition: Definition::Path(path),
+ })),
Some(val) => self.expected("string", key, val),
None => Ok(None),
}
pub fn get_bool(&self, key: &str) -> CargoResult<Option<Value<bool>>> {
if let Some(v) = self.get_env(key)? {
- return Ok(Some(v))
+ return Ok(Some(v));
}
match self.get(key)? {
- Some(CV::Boolean(b, path)) => {
- Ok(Some(Value {
- val: b,
- definition: Definition::Path(path),
- }))
- }
+ Some(CV::Boolean(b, path)) => Ok(Some(Value {
+ val: b,
+ definition: Definition::Path(path),
+ })),
Some(val) => self.expected("bool", key, val),
None => Ok(None),
}
}
fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf {
- let is_path = value.contains('/') ||
- (cfg!(windows) && value.contains('\\'));
+ let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\'));
if is_path {
definition.root(self).join(value)
} else {
if let Some(val) = self.get_string(key)? {
Ok(Some(Value {
val: self.string_to_path(val.val, &val.definition),
- definition: val.definition
+ definition: val.definition,
}))
} else {
Ok(None)
}
}
- pub fn get_path_and_args(&self, key: &str)
- -> CargoResult<Option<Value<(PathBuf, Vec<String>)>>> {
+ pub fn get_path_and_args(
+ &self,
+ key: &str,
+ ) -> CargoResult<Option<Value<(PathBuf, Vec<String>)>>> {
if let Some(mut val) = self.get_list_or_split_string(key)? {
if !val.val.is_empty() {
return Ok(Some(Value {
- val: (self.string_to_path(val.val.remove(0), &val.definition), val.val),
- definition: val.definition
+ val: (
+ self.string_to_path(val.val.remove(0), &val.definition),
+ val.val,
+ ),
+ definition: val.definition,
}));
}
}
Ok(None)
}
- pub fn get_list(&self, key: &str)
- -> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
+ pub fn get_list(&self, key: &str) -> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
match self.get(key)? {
- Some(CV::List(i, path)) => {
- Ok(Some(Value {
- val: i,
- definition: Definition::Path(path),
- }))
- }
+ Some(CV::List(i, path)) => Ok(Some(Value {
+ val: i,
+ definition: Definition::Path(path),
+ })),
Some(val) => self.expected("list", key, val),
None => Ok(None),
}
}
- pub fn get_list_or_split_string(&self, key: &str)
- -> CargoResult<Option<Value<Vec<String>>>> {
+ pub fn get_list_or_split_string(&self, key: &str) -> CargoResult<Option<Value<Vec<String>>>> {
match self.get_env::<String>(key) {
- Ok(Some(value)) =>
+ Ok(Some(value)) => {
return Ok(Some(Value {
val: value.val.split(' ').map(str::to_string).collect(),
- definition: value.definition
- })),
+ definition: value.definition,
+ }))
+ }
Err(err) => return Err(err),
Ok(None) => (),
}
match self.get(key)? {
- Some(CV::List(i, path)) => {
- Ok(Some(Value {
- val: i.into_iter().map(|(s, _)| s).collect(),
- definition: Definition::Path(path),
- }))
- }
- Some(CV::String(i, path)) => {
- Ok(Some(Value {
- val: i.split(' ').map(str::to_string).collect(),
- definition: Definition::Path(path),
- }))
- }
+ Some(CV::List(i, path)) => Ok(Some(Value {
+ val: i.into_iter().map(|(s, _)| s).collect(),
+ definition: Definition::Path(path),
+ })),
+ Some(CV::String(i, path)) => Ok(Some(Value {
+ val: i.split(' ').map(str::to_string).collect(),
+ definition: Definition::Path(path),
+ })),
Some(val) => self.expected("list or string", key, val),
None => Ok(None),
}
}
- pub fn get_table(&self, key: &str)
- -> CargoResult<Option<Value<HashMap<String, CV>>>> {
+ pub fn get_table(&self, key: &str) -> CargoResult<Option<Value<HashMap<String, CV>>>> {
match self.get(key)? {
- Some(CV::Table(i, path)) => {
- Ok(Some(Value {
- val: i,
- definition: Definition::Path(path),
- }))
- }
+ Some(CV::Table(i, path)) => Ok(Some(Value {
+ val: i,
+ definition: Definition::Path(path),
+ })),
Some(val) => self.expected("table", key, val),
None => Ok(None),
}
pub fn get_i64(&self, key: &str) -> CargoResult<Option<Value<i64>>> {
if let Some(v) = self.get_env(key)? {
- return Ok(Some(v))
+ return Ok(Some(v));
}
match self.get(key)? {
- Some(CV::Integer(i, path)) => {
- Ok(Some(Value {
- val: i,
- definition: Definition::Path(path),
- }))
- }
+ Some(CV::Integer(i, path)) => Ok(Some(Value {
+ val: i,
+ definition: Definition::Path(path),
+ })),
Some(val) => self.expected("integer", key, val),
None => Ok(None),
}
Some(v) => {
let value = v.val;
if value < 0 {
- bail!("net.retry must be positive, but found {} in {}",
- v.val, v.definition)
+ bail!(
+ "net.retry must be positive, but found {} in {}",
+ v.val,
+ v.definition
+ )
} else {
Ok(value)
}
}
pub fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> {
- val.expected(ty, key).map_err(|e| {
- format_err!("invalid configuration for key `{}`\n{}", key, e)
- })
+ val.expected(ty, key)
+ .map_err(|e| format_err!("invalid configuration for key `{}`\n{}", key, e))
}
- pub fn configure(&mut self,
- verbose: u32,
- quiet: Option<bool>,
- color: &Option<String>,
- frozen: bool,
- locked: bool,
- unstable_flags: &[String]) -> CargoResult<()> {
+ pub fn configure(
+ &mut self,
+ verbose: u32,
+ quiet: Option<bool>,
+ color: &Option<String>,
+ frozen: bool,
+ locked: bool,
+ unstable_flags: &[String],
+ ) -> CargoResult<()> {
let extra_verbose = verbose >= 2;
- let verbose = if verbose == 0 {None} else {Some(true)};
+ let verbose = if verbose == 0 { None } else { Some(true) };
// Ignore errors in the configuration files.
let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val);
let color = color.as_ref().or_else(|| cfg_color.as_ref());
let verbosity = match (verbose, cfg_verbose, quiet) {
- (Some(true), _, None) |
- (None, Some(true), None) => Verbosity::Verbose,
+ (Some(true), _, None) | (None, Some(true), None) => Verbosity::Verbose,
// command line takes precedence over configuration, so ignore the
// configuration.
// Can't actually get `Some(false)` as a value from the command
// line, so just ignore them here to appease exhaustiveness checking
// in match statements.
- (Some(false), _, _) |
- (_, _, Some(false)) |
-
- (None, Some(false), None) |
- (None, None, None) => Verbosity::Normal,
+ (Some(false), _, _)
+ | (_, _, Some(false))
+ | (None, Some(false), None)
+ | (None, None, None) => Verbosity::Normal,
};
self.shell().set_verbosity(verbosity);
walk_tree(&self.cwd, |path| {
let mut contents = String::new();
let mut file = File::open(&path)?;
- file.read_to_string(&mut contents).chain_err(|| {
- format!("failed to read configuration file `{}`",
- path.display())
- })?;
- let toml = cargo_toml::parse(&contents,
- path,
- self).chain_err(|| {
- format!("could not parse TOML configuration in `{}`",
- path.display())
+ file.read_to_string(&mut contents)
+ .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?;
+ let toml = cargo_toml::parse(&contents, path, self).chain_err(|| {
+ format!("could not parse TOML configuration in `{}`", path.display())
})?;
let value = CV::from_toml(path, toml).chain_err(|| {
- format!("failed to load TOML configuration from `{}`",
- path.display())
- })?;
- cfg.merge(value).chain_err(|| {
- format!("failed to merge configuration at `{}`", path.display())
+ format!(
+ "failed to load TOML configuration from `{}`",
+ path.display()
+ )
})?;
+ cfg.merge(value)
+ .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?;
Ok(())
}).chain_err(|| "Couldn't load Cargo configuration")?;
/// Gets the index for a registry.
pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> {
- Ok(match self.get_string(&format!("registries.{}.index", registry))? {
- Some(index) => {
- let url = index.val.to_url()?;
- if url.username() != "" || url.password().is_some() {
- bail!("Registry URLs may not contain credentials");
+ Ok(
+ match self.get_string(&format!("registries.{}.index", registry))? {
+ Some(index) => {
+ let url = index.val.to_url()?;
+ if url.username() != "" || url.password().is_some() {
+ bail!("Registry URLs may not contain credentials");
+ }
+ url
}
- url
- }
- None => bail!("No index found for registry: `{}`", registry),
- })
+ None => bail!("No index found for registry: `{}`", registry),
+ },
+ )
}
/// Loads credentials config from the credentials file into the ConfigValue object, if present.
let mut contents = String::new();
let mut file = File::open(&credentials)?;
file.read_to_string(&mut contents).chain_err(|| {
- format!("failed to read configuration file `{}`", credentials.display())
+ format!(
+ "failed to read configuration file `{}`",
+ credentials.display()
+ )
})?;
- let toml = cargo_toml::parse(&contents,
- &credentials,
- self).chain_err(|| {
- format!("could not parse TOML configuration in `{}`", credentials.display())
+ let toml = cargo_toml::parse(&contents, &credentials, self).chain_err(|| {
+ format!(
+ "could not parse TOML configuration in `{}`",
+ credentials.display()
+ )
})?;
let mut value = CV::from_toml(&credentials, toml).chain_err(|| {
- format!("failed to load TOML configuration from `{}`", credentials.display())
+ format!(
+ "failed to load TOML configuration from `{}`",
+ credentials.display()
+ )
})?;
// backwards compatibility for old .cargo/credentials layout
/// Look for a path for `tool` in an environment variable or config path, but return `None`
/// if it's not present.
fn maybe_get_tool(&self, tool: &str) -> CargoResult<Option<PathBuf>> {
- let var = tool.chars().flat_map(|c| c.to_uppercase()).collect::<String>();
+ let var = tool.chars()
+ .flat_map(|c| c.to_uppercase())
+ .collect::<String>();
if let Some(tool_path) = env::var_os(&var) {
let maybe_relative = match tool_path.to_str() {
Some(s) => s.contains("/") || s.contains("\\"),
} else {
PathBuf::from(tool_path)
};
- return Ok(Some(path))
+ return Ok(Some(path));
}
let var = format!("build.{}", tool);
}
pub fn http(&self) -> CargoResult<&RefCell<Easy>> {
- let http = self.easy.try_borrow_with(|| {
- ops::http_handle(self).map(RefCell::new)
- })?;
+ let http = self.easy
+ .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?;
{
let mut http = http.borrow_mut();
http.reset();
}
pub fn crates_io_source_id<F>(&self, f: F) -> CargoResult<SourceId>
- where F: FnMut() -> CargoResult<SourceId>
+ where
+ F: FnMut() -> CargoResult<SourceId>,
{
Ok(self.crates_io_source_id.try_borrow_with(f)?.clone())
}
#[derive(Eq, PartialEq, Clone, Copy)]
pub enum Location {
Project,
- Global
+ Global,
}
-#[derive(Eq,PartialEq,Clone,Deserialize)]
+#[derive(Eq, PartialEq, Clone, Deserialize)]
pub enum ConfigValue {
Integer(i64, PathBuf),
String(String, PathBuf),
impl fmt::Debug for ConfigValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
- CV::Integer(i, ref path) => write!(f, "{} (from {})", i,
- path.display()),
- CV::Boolean(b, ref path) => write!(f, "{} (from {})", b,
- path.display()),
- CV::String(ref s, ref path) => write!(f, "{} (from {})", s,
- path.display()),
+ CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()),
+ CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()),
+ CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()),
CV::List(ref list, ref path) => {
write!(f, "[")?;
for (i, &(ref s, ref path)) in list.iter().enumerate() {
- if i > 0 { write!(f, ", ")?; }
+ if i > 0 {
+ write!(f, ", ")?;
+ }
write!(f, "{} (from {})", s, path.display())?;
}
write!(f, "] (from {})", path.display())
toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())),
toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())),
toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())),
- toml::Value::Array(val) => {
- Ok(CV::List(val.into_iter().map(|toml| {
- match toml {
+ toml::Value::Array(val) => Ok(CV::List(
+ val.into_iter()
+ .map(|toml| match toml {
toml::Value::String(val) => Ok((val, path.to_path_buf())),
- v => bail!("expected string but found {} in list",
- v.type_str()),
- }
- }).collect::<CargoResult<_>>()?, path.to_path_buf()))
- }
- toml::Value::Table(val) => {
- Ok(CV::Table(val.into_iter().map(|(key, value)| {
- let value = CV::from_toml(path, value).chain_err(|| {
- format!("failed to parse key `{}`", key)
- })?;
- Ok((key, value))
- }).collect::<CargoResult<_>>()?, path.to_path_buf()))
- }
- v => bail!("found TOML configuration value of unknown type `{}`",
- v.type_str()),
+ v => bail!("expected string but found {} in list", v.type_str()),
+ })
+ .collect::<CargoResult<_>>()?,
+ path.to_path_buf(),
+ )),
+ toml::Value::Table(val) => Ok(CV::Table(
+ val.into_iter()
+ .map(|(key, value)| {
+ let value = CV::from_toml(path, value)
+ .chain_err(|| format!("failed to parse key `{}`", key))?;
+ Ok((key, value))
+ })
+ .collect::<CargoResult<_>>()?,
+ path.to_path_buf(),
+ )),
+ v => bail!(
+ "found TOML configuration value of unknown type `{}`",
+ v.type_str()
+ ),
}
}
CV::Boolean(s, _) => toml::Value::Boolean(s),
CV::String(s, _) => toml::Value::String(s),
CV::Integer(i, _) => toml::Value::Integer(i),
- CV::List(l, _) => toml::Value::Array(l
- .into_iter()
- .map(|(s, _)| toml::Value::String(s))
- .collect()),
- CV::Table(l, _) => toml::Value::Table(l.into_iter()
- .map(|(k, v)| (k, v.into_toml()))
- .collect()),
+ CV::List(l, _) => {
+ toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect())
+ }
+ CV::Table(l, _) => {
+ toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect())
+ }
}
}
fn merge(&mut self, from: ConfigValue) -> CargoResult<()> {
match (self, from) {
- (&mut CV::String(..), CV::String(..)) |
- (&mut CV::Integer(..), CV::Integer(..)) |
- (&mut CV::Boolean(..), CV::Boolean(..)) => {}
+ (&mut CV::String(..), CV::String(..))
+ | (&mut CV::Integer(..), CV::Integer(..))
+ | (&mut CV::Boolean(..), CV::Boolean(..)) => {}
(&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => {
let new = mem::replace(new, Vec::new());
old.extend(new.into_iter());
let path = value.definition_path().to_path_buf();
let entry = entry.get_mut();
entry.merge(value).chain_err(|| {
- format!("failed to merge key `{}` between \
- files:\n \
- file 1: {}\n \
- file 2: {}",
- key,
- entry.definition_path().display(),
- path.display())
-
+ format!(
+ "failed to merge key `{}` between \
+ files:\n \
+ file 1: {}\n \
+ file 2: {}",
+ key,
+ entry.definition_path().display(),
+ path.display()
+ )
})?;
}
- Vacant(entry) => { entry.insert(value); }
+ Vacant(entry) => {
+ entry.insert(value);
+ }
};
}
}
(expected, found) => {
- return Err(internal(format!("expected {}, but found {}",
- expected.desc(), found.desc())))
+ return Err(internal(format!(
+ "expected {}, but found {}",
+ expected.desc(),
+ found.desc()
+ )))
}
}
}
}
- pub fn table(&self, key: &str)
- -> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
+ pub fn table(&self, key: &str) -> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
match *self {
CV::Table(ref table, ref p) => Ok((table, p)),
_ => self.expected("table", key),
}
pub fn definition_path(&self) -> &Path {
- match *self {
- CV::Boolean(_, ref p) |
- CV::Integer(_, ref p) |
- CV::String(_, ref p) |
- CV::List(_, ref p) |
- CV::Table(_, ref p) => p
+ match *self {
+ CV::Boolean(_, ref p)
+ | CV::Integer(_, ref p)
+ | CV::String(_, ref p)
+ | CV::List(_, ref p)
+ | CV::Table(_, ref p) => p,
}
}
pub fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> {
- bail!("expected a {}, but found a {} for `{}` in {}",
- wanted, self.desc(), key,
- self.definition_path().display())
+ bail!(
+ "expected a {}, but found a {} for `{}` in {}",
+ wanted,
+ self.desc(),
+ key,
+ self.definition_path().display()
+ )
}
}
}
fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
- where F: FnMut(&Path) -> CargoResult<()>
+where
+ F: FnMut(&Path) -> CargoResult<()>,
{
let mut stash: HashSet<PathBuf> = HashSet::new();
// in our history to be sure we pick up that standard location for
// information.
let home = homedir(pwd).ok_or_else(|| {
- format_err!("Cargo couldn't find your home directory. \
- This probably means that $HOME was not set.")
+ format_err!(
+ "Cargo couldn't find your home directory. \
+ This probably means that $HOME was not set."
+ )
})?;
let config = home.join("config");
if !stash.contains(&config) && fs::metadata(&config).is_ok() {
Ok(())
}
-pub fn save_credentials(cfg: &Config,
- token: String,
- registry: Option<String>) -> CargoResult<()> {
+pub fn save_credentials(cfg: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
let mut file = {
cfg.home_path.create_dir()?;
- cfg.home_path.open_rw(Path::new("credentials"), cfg,
- "credentials' config file")?
+ cfg.home_path
+ .open_rw(Path::new("credentials"), cfg, "credentials' config file")?
};
let (key, value) = {
if let Some(registry) = registry {
let mut map = HashMap::new();
map.insert(registry, table);
- ("registries".into(), CV::Table(map, file.path().to_path_buf()))
+ (
+ "registries".into(),
+ CV::Table(map, file.path().to_path_buf()),
+ )
} else {
("registry".into(), table)
}
let mut contents = String::new();
file.read_to_string(&mut contents).chain_err(|| {
- format!("failed to read configuration file `{}`", file.path().display())
+ format!(
+ "failed to read configuration file `{}`",
+ file.path().display()
+ )
})?;
let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?;
if let Some(token) = toml.as_table_mut().unwrap().remove("token") {
let mut map = HashMap::new();
map.insert("token".to_string(), token);
- toml.as_table_mut().unwrap().insert("registry".into(), map.into());
+ toml.as_table_mut()
+ .unwrap()
+ .insert("registry".into(), map.into());
}
- toml.as_table_mut()
- .unwrap()
- .insert(key, value.into_toml());
+ toml.as_table_mut().unwrap().insert(key, value.into_toml());
let contents = toml.to_string();
file.seek(SeekFrom::Start(0))?;
return Ok(());
#[cfg(unix)]
- fn set_permissions(file: & File, mode: u32) -> CargoResult<()> {
+ fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
use std::os::unix::fs::PermissionsExt;
let mut perms = file.metadata()?.permissions();
#[cfg(not(unix))]
#[allow(unused)]
- fn set_permissions(file: & File, mode: u32) -> CargoResult<()> {
+ fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
Ok(())
}
}
use std::collections::{HashMap, HashSet};
use std::hash::Hash;
-pub use self::Freshness::{Fresh, Dirty};
+pub use self::Freshness::{Dirty, Fresh};
#[derive(Debug)]
pub struct DependencyQueue<K: Eq + Hash, V> {
impl Freshness {
pub fn combine(&self, other: Freshness) -> Freshness {
- match *self { Fresh => other, Dirty => Dirty }
+ match *self {
+ Fresh => other,
+ Dirty => Dirty,
+ }
}
}
///
/// It is assumed that any dependencies of this package will eventually also
/// be added to the dependency queue.
- pub fn queue(&mut self,
- fresh: Freshness,
- key: K,
- value: V,
- dependencies: &[K]) -> &mut V {
+ pub fn queue(&mut self, fresh: Freshness, key: K, value: V, dependencies: &[K]) -> &mut V {
let slot = match self.dep_map.entry(key.clone()) {
Occupied(v) => return &mut v.into_mut().1,
Vacant(v) => v,
let mut my_dependencies = HashSet::new();
for dep in dependencies {
my_dependencies.insert(dep.clone());
- let rev = self.reverse_dep_map.entry(dep.clone())
- .or_insert_with(HashSet::new);
+ let rev = self.reverse_dep_map
+ .entry(dep.clone())
+ .or_insert_with(HashSet::new);
rev.insert(key.clone());
}
&mut slot.insert((my_dependencies, value)).1
results.insert(key.clone(), IN_PROGRESS);
- let depth = 1 + map.get(&key)
- .into_iter()
- .flat_map(|it| it)
- .map(|dep| depth(dep, map, results))
- .max()
- .unwrap_or(0);
+ let depth = 1
+ + map.get(&key)
+ .into_iter()
+ .flat_map(|it| it)
+ .map(|dep| depth(dep, map, results))
+ .max()
+ .unwrap_or(0);
*results.get_mut(key).unwrap() = depth;
// TODO: it'd be best here to throw in a heuristic of crate size as
// well. For example how long did this crate historically take to
// compile? How large is its source code? etc.
- let next = self.dep_map.iter()
+ let next = self.dep_map
+ .iter()
.filter(|&(_, &(ref deps, _))| deps.is_empty())
.map(|(key, _)| key.clone())
.max_by_key(|k| self.depth[k]);
let key = match next {
Some(key) => key,
- None => return None
+ None => return None,
};
let (_, data) = self.dep_map.remove(&key).unwrap();
- let fresh = if self.dirty.contains(&key) {Dirty} else {Fresh};
+ let fresh = if self.dirty.contains(&key) {
+ Dirty
+ } else {
+ Fresh
+ };
self.pending.insert(key.clone());
Some((fresh, key, data))
}
#![allow(unknown_lints)]
use std::fmt;
-use std::process::{Output, ExitStatus};
+use std::process::{ExitStatus, Output};
use std::str;
use core::{TargetKind, Workspace};
pub trait CargoResultExt<T, E> {
fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
- where F: FnOnce() -> D,
- D: fmt::Display + Send + Sync + 'static;
+ where
+ F: FnOnce() -> D,
+ D: fmt::Display + Send + Sync + 'static;
}
impl<T, E> CargoResultExt<T, E> for Result<T, E>
- where E: Into<Error>,
+where
+ E: Into<Error>,
{
fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
- where F: FnOnce() -> D,
- D: fmt::Display + Send + Sync + 'static,
+ where
+ F: FnOnce() -> D,
+ D: fmt::Display + Send + Sync + 'static,
{
self.map_err(|failure| {
let context = f();
pub enum Test {
Multiple,
Doc,
- UnitTest{kind: TargetKind, name: String, pkg_name: String}
+ UnitTest {
+ kind: TargetKind,
+ name: String,
+ pkg_name: String,
+ },
}
impl CargoTestError {
if errors.is_empty() {
panic!("Cannot create CargoTestError from empty Vec")
}
- let desc = errors.iter().map(|error| error.desc.clone())
- .collect::<Vec<String>>()
- .join("\n");
+ let desc = errors
+ .iter()
+ .map(|error| error.desc.clone())
+ .collect::<Vec<String>>()
+ .join("\n");
CargoTestError {
test,
desc,
pub fn hint(&self, ws: &Workspace) -> String {
match self.test {
- Test::UnitTest{ref kind, ref name, ref pkg_name} => {
+ Test::UnitTest {
+ ref kind,
+ ref name,
+ ref pkg_name,
+ } => {
let pkg_info = if ws.members().count() > 1 && ws.is_virtual() {
format!("-p {} ", pkg_name)
} else {
};
match *kind {
- TargetKind::Bench =>
- format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name),
- TargetKind::Bin =>
- format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name),
- TargetKind::Lib(_) =>
- format!("test failed, to rerun pass '{}--lib'", pkg_info),
- TargetKind::Test =>
- format!("test failed, to rerun pass '{}--test {}'", pkg_info, name),
- TargetKind::ExampleBin | TargetKind::ExampleLib(_) =>
- format!("test failed, to rerun pass '{}--example {}", pkg_info, name),
- _ => "test failed.".into()
+ TargetKind::Bench => {
+ format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name)
+ }
+ TargetKind::Bin => {
+ format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name)
+ }
+ TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info),
+ TargetKind::Test => {
+ format!("test failed, to rerun pass '{}--test {}'", pkg_info, name)
+ }
+ TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
+ format!("test failed, to rerun pass '{}--example {}", pkg_info, name)
+ }
+ _ => "test failed.".into(),
}
- },
+ }
Test::Doc => "test failed, to rerun pass '--doc'".into(),
- _ => "test failed.".into()
+ _ => "test failed.".into(),
}
}
}
pub struct CliError {
pub error: Option<CargoError>,
pub unknown: bool,
- pub exit_code: i32
+ pub exit_code: i32,
}
impl CliError {
pub fn new(error: CargoError, code: i32) -> CliError {
let unknown = error.downcast_ref::<Internal>().is_some();
- CliError { error: Some(error), exit_code: code, unknown }
+ CliError {
+ error: Some(error),
+ exit_code: code,
+ unknown,
+ }
}
pub fn code(code: i32) -> CliError {
- CliError { error: None, exit_code: code, unknown: false }
+ CliError {
+ error: None,
+ exit_code: code,
+ unknown: false,
+ }
}
}
}
}
-
// =============================================================================
// Construction helpers
-pub fn process_error(msg: &str,
- status: Option<&ExitStatus>,
- output: Option<&Output>) -> ProcessError
-{
+pub fn process_error(
+ msg: &str,
+ status: Option<&ExitStatus>,
+ output: Option<&Output>,
+) -> ProcessError {
let exit = match status {
Some(s) => status_to_string(s),
None => "never executed".to_string(),
use std::fs::{self, File, OpenOptions};
-use std::io::{Seek, Read, Write, SeekFrom};
+use std::io::{Read, Seek, SeekFrom, Write};
use std::io;
-use std::path::{Path, PathBuf, Display};
+use std::path::{Display, Path, PathBuf};
use termcolor::Color::Cyan;
-use fs2::{FileExt, lock_contended_error};
+use fs2::{lock_contended_error, FileExt};
#[allow(unused_imports)]
use libc;
use util::Config;
use util::paths;
-use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
pub struct FileLock {
f: Option<File>,
for entry in path.parent().unwrap().read_dir()? {
let entry = entry?;
if Some(&entry.file_name()[..]) == path.file_name() {
- continue
+ continue;
}
let kind = entry.file_type()?;
if kind.is_dir() {
///
/// The returned file can be accessed to look at the path and also has
/// read/write access to the underlying file.
- pub fn open_rw<P>(&self,
- path: P,
- config: &Config,
- msg: &str) -> CargoResult<FileLock>
- where P: AsRef<Path>
+ pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+ where
+ P: AsRef<Path>,
{
- self.open(path.as_ref(),
- OpenOptions::new().read(true).write(true).create(true),
- State::Exclusive,
- config,
- msg)
+ self.open(
+ path.as_ref(),
+ OpenOptions::new().read(true).write(true).create(true),
+ State::Exclusive,
+ config,
+ msg,
+ )
}
/// Opens shared access to a file, returning the locked version of a file.
/// The returned file can be accessed to look at the path and also has read
/// access to the underlying file. Any writes to the file will return an
/// error.
- pub fn open_ro<P>(&self,
- path: P,
- config: &Config,
- msg: &str) -> CargoResult<FileLock>
- where P: AsRef<Path>
+ pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+ where
+ P: AsRef<Path>,
{
- self.open(path.as_ref(),
- OpenOptions::new().read(true),
- State::Shared,
- config,
- msg)
+ self.open(
+ path.as_ref(),
+ OpenOptions::new().read(true),
+ State::Shared,
+ config,
+ msg,
+ )
}
- fn open(&self,
- path: &Path,
- opts: &OpenOptions,
- state: State,
- config: &Config,
- msg: &str) -> CargoResult<FileLock> {
+ fn open(
+ &self,
+ path: &Path,
+ opts: &OpenOptions,
+ state: State,
+ config: &Config,
+ msg: &str,
+ ) -> CargoResult<FileLock> {
let path = self.root.join(path);
// If we want an exclusive lock then if we fail because of NotFound it's
// likely because an intermediate directory didn't exist, so try to
// create the directory and then continue.
- let f = opts.open(&path).or_else(|e| {
- if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
- create_dir_all(path.parent().unwrap())?;
- opts.open(&path)
- } else {
- Err(e)
- }
- }).chain_err(|| {
- format!("failed to open: {}", path.display())
- })?;
+ let f = opts.open(&path)
+ .or_else(|e| {
+ if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
+ create_dir_all(path.parent().unwrap())?;
+ opts.open(&path)
+ } else {
+ Err(e)
+ }
+ })
+ .chain_err(|| format!("failed to open: {}", path.display()))?;
match state {
State::Exclusive => {
- acquire(config, msg, &path,
- &|| f.try_lock_exclusive(),
- &|| f.lock_exclusive())?;
+ acquire(config, msg, &path, &|| f.try_lock_exclusive(), &|| {
+ f.lock_exclusive()
+ })?;
}
State::Shared => {
- acquire(config, msg, &path,
- &|| f.try_lock_shared(),
- &|| f.lock_shared())?;
+ acquire(config, msg, &path, &|| f.try_lock_shared(), &|| {
+ f.lock_shared()
+ })?;
}
State::Unlocked => {}
-
}
- Ok(FileLock { f: Some(f), path, state })
+ Ok(FileLock {
+ f: Some(f),
+ path,
+ state,
+ })
}
}
///
/// Returns an error if the lock could not be acquired or if any error other
/// than a contention error happens.
-fn acquire(config: &Config,
- msg: &str,
- path: &Path,
- try: &Fn() -> io::Result<()>,
- block: &Fn() -> io::Result<()>) -> CargoResult<()> {
-
+fn acquire(
+ config: &Config,
+ msg: &str,
+ path: &Path,
+ try: &Fn() -> io::Result<()>,
+ block: &Fn() -> io::Result<()>,
+) -> CargoResult<()> {
// File locking on Unix is currently implemented via `flock`, which is known
// to be broken on NFS. We could in theory just ignore errors that happen on
// NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking
//
// [1]: https://github.com/rust-lang/cargo/issues/2615
if is_on_nfs_mount(path) {
- return Ok(())
+ return Ok(());
}
match try() {
// implement file locking. We detect that here via the return value of
// locking (e.g. inspecting errno).
#[cfg(unix)]
- Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) => return Ok(()),
+ Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) =>
+ {
+ return Ok(())
+ }
#[cfg(target_os = "linux")]
- Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => return Ok(()),
+ Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) =>
+ {
+ return Ok(())
+ }
Err(e) => {
if e.raw_os_error() != lock_contended_error().raw_os_error() {
let e = CargoError::from(e);
let cx = format!("failed to lock file: {}", path.display());
- return Err(e.context(cx).into())
+ return Err(e.context(cx).into());
}
}
}
let msg = format!("waiting for file lock on {}", msg);
config.shell().status_with_color("Blocking", &msg, Cyan)?;
- block().chain_err(|| {
- format!("failed to lock file: {}", path.display())
- })?;
+ block().chain_err(|| format!("failed to lock file: {}", path.display()))?;
return Ok(());
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
Err(e) => {
if e.kind() == io::ErrorKind::NotFound {
if let Some(p) = path.parent() {
- return create_dir_all(p).and_then(|()| create_dir(path))
+ return create_dir_all(p).and_then(|()| create_dir(path));
}
}
Err(e)
use std::collections::hash_map::{HashMap, Keys};
pub struct Graph<N> {
- nodes: HashMap<N, HashSet<N>>
+ nodes: HashMap<N, HashSet<N>>,
}
enum Mark {
InProgress,
- Done
+ Done,
}
pub type Nodes<'a, N> = Keys<'a, N, HashSet<N>>;
impl<N: Eq + Hash + Clone> Graph<N> {
pub fn new() -> Graph<N> {
- Graph { nodes: HashMap::new() }
+ Graph {
+ nodes: HashMap::new(),
+ }
}
pub fn add(&mut self, node: N, children: &[N]) {
- self.nodes.entry(node)
+ self.nodes
+ .entry(node)
.or_insert_with(HashSet::new)
.extend(children.iter().cloned());
}
pub fn link(&mut self, node: N, child: N) {
- self.nodes.entry(node).or_insert_with(HashSet::new).insert(child);
+ self.nodes
+ .entry(node)
+ .or_insert_with(HashSet::new)
+ .insert(child);
}
pub fn get_nodes(&self) -> &HashMap<N, HashSet<N>> {
}
impl<N: Eq + Hash> PartialEq for Graph<N> {
- fn eq(&self, other: &Graph<N>) -> bool { self.nodes.eq(&other.nodes) }
+ fn eq(&self, other: &Graph<N>) -> bool {
+ self.nodes.eq(&other.nodes)
+ }
}
impl<N: Eq + Hash> Eq for Graph<N> {}
impl<N: Eq + Hash + Clone> Clone for Graph<N> {
fn clone(&self) -> Graph<N> {
- Graph { nodes: self.nodes.clone() }
+ Graph {
+ nodes: self.nodes.clone(),
+ }
}
}
#![allow(deprecated)]
use hex;
-use std::hash::{Hasher, Hash, SipHasher};
+use std::hash::{Hash, Hasher, SipHasher};
pub fn to_hex(num: u64) -> String {
hex::encode(&[
- (num >> 0) as u8,
- (num >> 8) as u8,
+ (num >> 0) as u8,
+ (num >> 8) as u8,
(num >> 16) as u8,
(num >> 24) as u8,
(num >> 32) as u8,
loop {
let manifest = current.join(file);
if fs::metadata(&manifest).is_ok() {
- return Ok(manifest)
+ return Ok(manifest);
}
match current.parent() {
}
}
- bail!("could not find `{}` in `{}` or any parent directory",
- file, pwd.display())
+ bail!(
+ "could not find `{}` in `{}` or any parent directory",
+ file,
+ pwd.display()
+ )
}
/// Find the root Cargo.toml
-pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path)
- -> CargoResult<PathBuf> {
+pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path) -> CargoResult<PathBuf> {
match manifest_path {
Some(path) => {
let absolute_path = paths::normalize_path(&cwd.join(&path));
bail!("manifest path `{}` does not exist", path)
}
Ok(absolute_path)
- },
+ }
None => find_project_manifest(cwd, "Cargo.toml"),
}
}
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
if job.is_null() {
- return None
+ return None;
}
let job = Handle { inner: job };
// our children will reside in the job once we spawn a process.
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
info = mem::zeroed();
- info.BasicLimitInformation.LimitFlags =
- JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
- let r = SetInformationJobObject(job.inner,
- JobObjectExtendedLimitInformation,
- &mut info as *mut _ as LPVOID,
- mem::size_of_val(&info) as DWORD);
+ info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+ let r = SetInformationJobObject(
+ job.inner,
+ JobObjectExtendedLimitInformation,
+ &mut info as *mut _ as LPVOID,
+ mem::size_of_val(&info) as DWORD,
+ );
if r == 0 {
- return None
+ return None;
}
// Assign our process to this job object, meaning that our children will
let me = GetCurrentProcess();
let r = AssignProcessToJobObject(job.inner, me);
if r == 0 {
- return None
+ return None;
}
Some(Setup { job })
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
info = mem::zeroed();
let r = SetInformationJobObject(
- self.job.inner,
- JobObjectExtendedLimitInformation,
- &mut info as *mut _ as LPVOID,
- mem::size_of_val(&info) as DWORD);
+ self.job.inner,
+ JobObjectExtendedLimitInformation,
+ &mut info as *mut _ as LPVOID,
+ mem::size_of_val(&info) as DWORD,
+ );
if r == 0 {
- info!("failed to configure job object to defaults: {}",
- last_err());
+ info!("failed to configure job object to defaults: {}", last_err());
}
}
}
let mut jobs: Jobs = mem::zeroed();
let r = QueryInformationJobObject(
- self.job.inner,
- JobObjectBasicProcessIdList,
- &mut jobs as *mut _ as LPVOID,
- mem::size_of_val(&jobs) as DWORD,
- 0 as *mut _);
+ self.job.inner,
+ JobObjectBasicProcessIdList,
+ &mut jobs as *mut _ as LPVOID,
+ mem::size_of_val(&jobs) as DWORD,
+ 0 as *mut _,
+ );
if r == 0 {
info!("failed to query job object: {}", last_err());
- return false
+ return false;
}
let mut killed = false;
assert!(list.len() > 0);
info!("found {} remaining processes", list.len() - 1);
- let list = list.iter().filter(|&&id| {
- // let's not kill ourselves
- id as DWORD != GetCurrentProcessId()
- }).filter_map(|&id| {
- // Open the process with the necessary rights, and if this
- // fails then we probably raced with the process exiting so we
- // ignore the problem.
- let flags = PROCESS_QUERY_INFORMATION |
- PROCESS_TERMINATE |
- SYNCHRONIZE;
- let p = OpenProcess(flags, FALSE, id as DWORD);
- if p.is_null() {
- None
- } else {
- Some(Handle { inner: p })
- }
- }).filter(|p| {
- // Test if this process was actually in the job object or not.
- // If it's not then we likely raced with something else
- // recycling this PID, so we just skip this step.
- let mut res = 0;
- let r = IsProcessInJob(p.inner, self.job.inner, &mut res);
- if r == 0 {
- info!("failed to test is process in job: {}", last_err());
- return false
- }
- res == TRUE
- });
-
+ let list = list.iter()
+ .filter(|&&id| {
+ // let's not kill ourselves
+ id as DWORD != GetCurrentProcessId()
+ })
+ .filter_map(|&id| {
+ // Open the process with the necessary rights, and if this
+ // fails then we probably raced with the process exiting so we
+ // ignore the problem.
+ let flags = PROCESS_QUERY_INFORMATION | PROCESS_TERMINATE | SYNCHRONIZE;
+ let p = OpenProcess(flags, FALSE, id as DWORD);
+ if p.is_null() {
+ None
+ } else {
+ Some(Handle { inner: p })
+ }
+ })
+ .filter(|p| {
+ // Test if this process was actually in the job object or not.
+ // If it's not then we likely raced with something else
+ // recycling this PID, so we just skip this step.
+ let mut res = 0;
+ let r = IsProcessInJob(p.inner, self.job.inner, &mut res);
+ if r == 0 {
+ info!("failed to test is process in job: {}", last_err());
+ return false;
+ }
+ res == TRUE
+ });
for p in list {
// Load the file which this process was spawned from. We then
// later use this for identification purposes.
let mut buf = [0; 1024];
- let r = GetProcessImageFileNameW(p.inner,
- buf.as_mut_ptr(),
- buf.len() as DWORD);
+ let r = GetProcessImageFileNameW(p.inner, buf.as_mut_ptr(), buf.len() as DWORD);
if r == 0 {
info!("failed to get image name: {}", last_err());
- continue
+ continue;
}
let s = OsString::from_wide(&buf[..r as usize]);
info!("found remaining: {:?}", s);
if let Some(s) = s.to_str() {
if s.contains("mspdbsrv") {
info!("\toops, this is mspdbsrv");
- continue
+ continue;
}
}
let r = WaitForSingleObject(p.inner, INFINITE);
if r != 0 {
info!("failed to wait for process to die: {}", last_err());
- return false
+ return false;
}
killed = true;
}
impl Drop for Handle {
fn drop(&mut self) {
- unsafe { CloseHandle(self.inner); }
+ unsafe {
+ CloseHandle(self.inner);
+ }
}
}
}
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize {
- if me.is_empty() { return t.chars().count(); }
- if t.is_empty() { return me.chars().count(); }
+ if me.is_empty() {
+ return t.chars().count();
+ }
+ if t.is_empty() {
+ return me.chars().count();
+ }
let mut dcol = (0..t.len() + 1).collect::<Vec<_>>();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
-
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
-
let next = dcol[j + 1];
if sc == tc {
#[test]
fn test_lev_distance() {
- use std::char::{ from_u32, MAX };
+ use std::char::{from_u32, MAX};
// Test bytelength agnosticity
for c in (0u32..MAX as u32)
- .filter_map(|i| from_u32(i))
- .map(|i| i.to_string()) {
+ .filter_map(|i| from_u32(i))
+ .map(|i| i.to_string())
+ {
assert_eq!(lev_distance(&c, &c), 0);
}
use serde::ser;
use serde_json::{self, Value};
-use core::{PackageId, Target, Profile};
+use core::{PackageId, Profile, Target};
pub trait Message: ser::Serialize {
fn reason(&self) -> &str;
pub use self::cfg::{Cfg, CfgExpr};
-pub use self::config::{Config, ConfigValue, homedir};
-pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness};
-pub use self::errors::{CargoResult, CargoResultExt, CargoError, Test, CliResult};
-pub use self::errors::{CliError, ProcessError, CargoTestError};
-pub use self::errors::{process_error, internal};
+pub use self::config::{homedir, Config, ConfigValue};
+pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
+pub use self::errors::{CargoError, CargoResult, CargoResultExt, CliResult, Test};
+pub use self::errors::{CargoTestError, CliError, ProcessError};
+pub use self::errors::{internal, process_error};
pub use self::flock::{FileLock, Filesystem};
pub use self::graph::Graph;
-pub use self::hex::{to_hex, short_hash, hash_u64};
-pub use self::lev_distance::{lev_distance};
-pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path};
-pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix};
+pub use self::hex::{hash_u64, short_hash, to_hex};
+pub use self::lev_distance::lev_distance;
+pub use self::paths::{bytes2path, dylib_path, join_paths, path2bytes};
+pub use self::paths::{dylib_path_envvar, normalize_path, without_prefix};
pub use self::process_builder::{process, ProcessBuilder};
pub use self::rustc::Rustc;
pub use self::sha256::Sha256;
pub use self::to_semver::ToSemver;
pub use self::to_url::ToUrl;
-pub use self::vcs::{GitRepo, HgRepo, PijulRepo, FossilRepo};
+pub use self::vcs::{FossilRepo, GitRepo, HgRepo, PijulRepo};
pub use self::read2::read2;
pub use self::progress::Progress;
for e in err.causes() {
if let Some(git_err) = e.downcast_ref::<git2::Error>() {
match git_err.class() {
- git2::ErrorClass::Net |
- git2::ErrorClass::Os => return true,
- _ => ()
+ git2::ErrorClass::Net | git2::ErrorClass::Os => return true,
+ _ => (),
}
}
if let Some(curl_err) = e.downcast_ref::<curl::Error>() {
- if curl_err.is_couldnt_connect() ||
- curl_err.is_couldnt_resolve_proxy() ||
- curl_err.is_couldnt_resolve_host() ||
- curl_err.is_operation_timedout() ||
- curl_err.is_recv_error() {
- return true
+ if curl_err.is_couldnt_connect() || curl_err.is_couldnt_resolve_proxy()
+ || curl_err.is_couldnt_resolve_host()
+ || curl_err.is_operation_timedout() || curl_err.is_recv_error()
+ {
+ return true;
}
}
if let Some(not_200) = e.downcast_ref::<HttpNot200>() {
if 500 <= not_200.code && not_200.code < 600 {
- return true
+ return true;
}
}
}
/// cargo_result = network::with_retry(&config, || something.download());
/// ```
pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
- where F: FnMut() -> CargoResult<T>
+where
+ F: FnMut() -> CargoResult<T>,
{
let mut remaining = config.net_retry()?;
loop {
match callback() {
Ok(ret) => return Ok(ret),
Err(ref e) if maybe_spurious(e) && remaining > 0 => {
- let msg = format!("spurious network error ({} tries \
- remaining): {}", remaining, e);
+ let msg = format!(
+ "spurious network error ({} tries \
+ remaining): {}",
+ remaining, e
+ );
config.shell().warn(msg)?;
remaining -= 1;
}
#[test]
fn with_retry_repeats_the_call_then_works() {
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
- let error1 = HttpNot200 { code: 501, url: "Uri".to_string() }.into();
- let error2 = HttpNot200 { code: 502, url: "Uri".to_string() }.into();
+ let error1 = HttpNot200 {
+ code: 501,
+ url: "Uri".to_string(),
+ }.into();
+ let error2 = HttpNot200 {
+ code: 502,
+ url: "Uri".to_string(),
+ }.into();
let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap();
let result = with_retry(&config, || results.pop().unwrap());
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
//String error messages are not considered spurious
- let error1 = CargoError::from(HttpNot200 { code: 501, url: "Uri".to_string() });
+ let error1 = CargoError::from(HttpNot200 {
+ code: 501,
+ url: "Uri".to_string(),
+ });
let error1 = CargoError::from(error1.context("A non-spurious wrapping err"));
- let error2 = CargoError::from(HttpNot200 { code: 502, url: "Uri".to_string() });
+ let error2 = CargoError::from(HttpNot200 {
+ code: 502,
+ url: "Uri".to_string(),
+ });
let error2 = CargoError::from(error2.context("A second chained error"));
let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap();
use std::fs::{self, File, OpenOptions};
use std::io;
use std::io::prelude::*;
-use std::path::{Path, PathBuf, Component};
+use std::path::{Component, Path, PathBuf};
use util::{internal, CargoResult};
-use util::errors::{CargoResultExt, Internal, CargoError};
+use util::errors::{CargoError, CargoResultExt, Internal};
pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> CargoResult<OsString> {
let err = match env::join_paths(paths.iter()) {
let err = CargoError::from(err);
let explain = Internal::new(format_err!("failed to join path array: {:?}", paths));
let err = CargoError::from(err.context(explain));
- let more_explain = format!("failed to join search paths together\n\
- Does ${} have an unterminated quote character?",
- env);
+ let more_explain = format!(
+ "failed to join search paths together\n\
+ Does ${} have an unterminated quote character?",
+ env
+ );
Err(err.context(more_explain).into())
}
pub fn dylib_path_envvar() -> &'static str {
- if cfg!(windows) {"PATH"}
- else if cfg!(target_os = "macos") {"DYLD_LIBRARY_PATH"}
- else {"LD_LIBRARY_PATH"}
+ if cfg!(windows) {
+ "PATH"
+ } else if cfg!(target_os = "macos") {
+ "DYLD_LIBRARY_PATH"
+ } else {
+ "LD_LIBRARY_PATH"
+ }
}
pub fn dylib_path() -> Vec<PathBuf> {
pub fn normalize_path(path: &Path) -> PathBuf {
let mut components = path.components().peekable();
- let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek()
- .cloned() {
+ let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
components.next();
PathBuf::from(c.as_os_str())
} else {
for component in components {
match component {
Component::Prefix(..) => unreachable!(),
- Component::RootDir => { ret.push(component.as_os_str()); }
+ Component::RootDir => {
+ ret.push(component.as_os_str());
+ }
Component::CurDir => {}
- Component::ParentDir => { ret.pop(); }
- Component::Normal(c) => { ret.push(c); }
+ Component::ParentDir => {
+ ret.pop();
+ }
+ Component::Normal(c) => {
+ ret.push(c);
+ }
}
}
ret
}
f.read_to_end(&mut ret)?;
Ok(ret)
- })().chain_err(|| {
- format!("failed to read `{}`", path.display())
- })?;
+ })()
+ .chain_err(|| format!("failed to read `{}`", path.display()))?;
Ok(res)
}
let mut f = File::create(path)?;
f.write_all(contents)?;
Ok(())
- })().chain_err(|| {
- format!("failed to write `{}`", path.display())
- })?;
+ })()
+ .chain_err(|| format!("failed to write `{}`", path.display()))?;
Ok(())
}
pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> {
(|| -> CargoResult<()> {
let mut f = OpenOptions::new()
- .write(true)
- .append(true)
- .create(true)
- .open(path)?;
+ .write(true)
+ .append(true)
+ .create(true)
+ .open(path)?;
f.write_all(contents)?;
Ok(())
- })().chain_err(|| {
- internal(format!("failed to write `{}`", path.display()))
- })?;
+ })()
+ .chain_err(|| internal(format!("failed to write `{}`", path.display())))?;
Ok(())
}
pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> {
match path.as_os_str().to_str() {
Some(s) => Ok(s.as_bytes()),
- None => Err(format_err!("invalid non-unicode path: {}",
- path.display())),
+ None => Err(format_err!("invalid non-unicode path: {}", path.display())),
}
}
pub struct PathAncestors<'a> {
current: Option<&'a Path>,
- stop_at: Option<PathBuf>
+ stop_at: Option<PathBuf>,
}
impl<'a> PathAncestors<'a> {
fn _remove_dir_all(p: &Path) -> CargoResult<()> {
if p.symlink_metadata()?.file_type().is_symlink() {
- return remove_file(p)
+ return remove_file(p);
}
- let entries = p.read_dir().chain_err(|| {
- format!("failed to read directory `{}`", p.display())
- })?;
+ let entries = p.read_dir()
+ .chain_err(|| format!("failed to read directory `{}`", p.display()))?;
for entry in entries {
let entry = entry?;
let path = entry.path();
}
fn _remove_dir(p: &Path) -> CargoResult<()> {
- fs::remove_dir(p).chain_err(|| {
- format!("failed to remove directory `{}`", p.display())
- })?;
+ fs::remove_dir(p).chain_err(|| format!("failed to remove directory `{}`", p.display()))?;
Ok(())
}
}
}
- Err(err).chain_err(|| {
- format!("failed to remove file `{}`", p.display())
- })?;
+ Err(err).chain_err(|| format!("failed to remove file `{}`", p.display()))?;
Ok(())
}
fn set_not_readonly(p: &Path) -> io::Result<bool> {
let mut perms = p.metadata()?.permissions();
if !perms.readonly() {
- return Ok(false)
+ return Ok(false);
}
perms.set_readonly(false);
fs::set_permissions(p, perms)?;
use std::collections::HashMap;
use std::env;
-use std::ffi::{OsString, OsStr};
+use std::ffi::{OsStr, OsString};
use std::fmt;
use std::path::Path;
-use std::process::{Command, Stdio, Output};
+use std::process::{Command, Output, Stdio};
use jobserver::Client;
use shell_escape::escape;
-use util::{CargoResult, CargoResultExt, CargoError, process_error, read2};
+use util::{process_error, read2, CargoError, CargoResult, CargoResultExt};
/// A builder object for an external process, similar to `std::process::Command`.
#[derive(Clone, Debug)]
/// (chainable) Add many args to the args list.
pub fn args<T: AsRef<OsStr>>(&mut self, arguments: &[T]) -> &mut ProcessBuilder {
- self.args.extend(arguments.iter().map(|t| {
- t.as_ref().to_os_string()
- }));
+ self.args
+ .extend(arguments.iter().map(|t| t.as_ref().to_os_string()));
self
}
/// (chainable) Replace args with new args list
pub fn args_replace<T: AsRef<OsStr>>(&mut self, arguments: &[T]) -> &mut ProcessBuilder {
- self.args = arguments.iter().map(|t| {
- t.as_ref().to_os_string()
- }).collect();
+ self.args = arguments
+ .iter()
+ .map(|t| t.as_ref().to_os_string())
+ .collect();
self
}
}
/// (chainable) Set an environment variable for the process.
- pub fn env<T: AsRef<OsStr>>(&mut self, key: &str,
- val: T) -> &mut ProcessBuilder {
- self.env.insert(key.to_string(), Some(val.as_ref().to_os_string()));
+ pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut ProcessBuilder {
+ self.env
+ .insert(key.to_string(), Some(val.as_ref().to_os_string()));
self
}
/// Get an environment variable as the process will see it (will inherit from environment
/// unless explicitally unset).
pub fn get_env(&self, var: &str) -> Option<OsString> {
- self.env.get(var).cloned().or_else(|| Some(env::var_os(var)))
+ self.env
+ .get(var)
+ .cloned()
+ .or_else(|| Some(env::var_os(var)))
.and_then(|s| s)
}
/// Get all environment variables explicitally set or unset for the process (not inherited
/// vars).
- pub fn get_envs(&self) -> &HashMap<String, Option<OsString>> { &self.env }
+ pub fn get_envs(&self) -> &HashMap<String, Option<OsString>> {
+ &self.env
+ }
/// Set the `make` jobserver. See the [jobserver crate][jobserver_docs] for
/// more information.
pub fn exec(&self) -> CargoResult<()> {
let mut command = self.build_command();
let exit = command.status().chain_err(|| {
- process_error(&format!("could not execute process `{}`",
- self.debug_string()), None, None)
+ process_error(
+ &format!("could not execute process `{}`", self.debug_string()),
+ None,
+ None,
+ )
})?;
if exit.success() {
Ok(())
} else {
Err(process_error(
- &format!("process didn't exit successfully: `{}`", self.debug_string()),
- Some(&exit), None).into())
+ &format!(
+ "process didn't exit successfully: `{}`",
+ self.debug_string()
+ ),
+ Some(&exit),
+ None,
+ ).into())
}
}
let mut command = self.build_command();
let error = command.exec();
- Err(CargoError::from(error).context(
- process_error(
+ Err(CargoError::from(error)
+ .context(process_error(
&format!("could not execute process `{}`", self.debug_string()),
None,
None,
- ),
- ).into())
+ ))
+ .into())
}
/// On unix, executes the process using the unix syscall `execvp`, which will block this
process_error(
&format!("could not execute process `{}`", self.debug_string()),
None,
- None)
+ None,
+ )
})?;
if output.status.success() {
Ok(output)
} else {
Err(process_error(
- &format!("process didn't exit successfully: `{}`", self.debug_string()),
- Some(&output.status), Some(&output)).into())
+ &format!(
+ "process didn't exit successfully: `{}`",
+ self.debug_string()
+ ),
+ Some(&output.status),
+ Some(&output),
+ ).into())
}
}
/// If any invocations of these function return an error, it will be propagated.
///
/// Optionally, output can be passed to errors using `print_output`
- pub fn exec_with_streaming(&self,
- on_stdout_line: &mut FnMut(&str) -> CargoResult<()>,
- on_stderr_line: &mut FnMut(&str) -> CargoResult<()>,
- print_output: bool)
- -> CargoResult<Output> {
+ pub fn exec_with_streaming(
+ &self,
+ on_stdout_line: &mut FnMut(&str) -> CargoResult<()>,
+ on_stderr_line: &mut FnMut(&str) -> CargoResult<()>,
+ print_output: bool,
+ ) -> CargoResult<Output> {
let mut stdout = Vec::new();
let mut stderr = Vec::new();
}
};
let data = data.drain(..idx);
- let dst = if is_out {&mut stdout} else {&mut stderr};
+ let dst = if is_out { &mut stdout } else { &mut stderr };
let start = dst.len();
dst.extend(data);
for line in String::from_utf8_lossy(&dst[start..]).lines() {
- if callback_error.is_some() { break }
+ if callback_error.is_some() {
+ break;
+ }
let callback_result = if is_out {
on_stdout_line(line)
} else {
}
})?;
child.wait()
- })().chain_err(|| {
+ })()
+ .chain_err(|| {
process_error(
- &format!("could not execute process `{}`",
- self.debug_string()),
+ &format!("could not execute process `{}`", self.debug_string()),
None,
- None)
+ None,
+ )
})?;
let output = Output {
stdout,
};
{
- let to_print = if print_output {
- Some(&output)
- } else {
- None
- };
+ let to_print = if print_output { Some(&output) } else { None };
if !output.status.success() {
return Err(process_error(
- &format!("process didn't exit successfully: `{}`", self.debug_string()),
- Some(&output.status), to_print).into())
+ &format!(
+ "process didn't exit successfully: `{}`",
+ self.debug_string()
+ ),
+ Some(&output.status),
+ to_print,
+ ).into());
} else if let Some(e) = callback_error {
let cx = process_error(
&format!("failed to parse process output: `{}`", self.debug_string()),
Some(&output.status),
to_print,
);
- return Err(CargoError::from(e).context(cx).into())
+ return Err(CargoError::from(e).context(cx).into());
}
}
}
for (k, v) in &self.env {
match *v {
- Some(ref v) => { command.env(k, v); }
- None => { command.env_remove(k); }
+ Some(ref v) => {
+ command.env(k, v);
+ }
+ None => {
+ command.env_remove(k);
+ }
}
}
if let Some(ref c) = self.jobserver {
}
pub fn start<T: fmt::Display>(desc: T) -> Profiler {
- if enabled_level().is_none() { return Profiler { desc: String::new() } }
+ if enabled_level().is_none() {
+ return Profiler {
+ desc: String::new(),
+ };
+ }
PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now()));
let start = PROFILE_STACK.with(|stack| stack.borrow_mut().pop().unwrap());
let duration = start.elapsed();
- let duration_ms = duration.as_secs() * 1000 + u64::from(duration.subsec_nanos() / 1_000_000);
+ let duration_ms =
+ duration.as_secs() * 1000 + u64::from(duration.subsec_nanos() / 1_000_000);
let stack_len = PROFILE_STACK.with(|stack| stack.borrow().len());
if stack_len == 0 {
fn print(lvl: usize, msgs: &[Message], enabled: usize) {
- if lvl > enabled { return }
+ if lvl > enabled {
+ return;
+ }
let mut last = 0;
for (i, &(l, time, ref msg)) in msgs.iter().enumerate() {
- if l != lvl { continue }
- println!("{} {:6}ms - {}",
- repeat(" ").take(lvl + 1).collect::<String>(),
- time, msg);
+ if l != lvl {
+ continue;
+ }
+ println!(
+ "{} {:6}ms - {}",
+ repeat(" ").take(lvl + 1).collect::<String>(),
+ time,
+ msg
+ );
print(lvl + 1, &msgs[last..i], enabled);
last = i;
}
-
}
MESSAGES.with(|msgs_rc| {
let mut msgs = msgs_rc.borrow_mut();
- msgs.push((0, duration_ms,
- mem::replace(&mut self.desc, String::new())));
+ msgs.push((0, duration_ms, mem::replace(&mut self.desc, String::new())));
print(0, &msgs, enabled);
});
} else {
use std::cmp;
use std::env;
use std::iter;
-use std::time::{Instant, Duration};
+use std::time::{Duration, Instant};
use core::shell::Verbosity;
-use util::{Config, CargoResult};
+use util::{CargoResult, Config};
pub struct Progress<'cfg> {
state: Option<State<'cfg>>,
Err(_) => false,
};
if cfg.shell().verbosity() == Verbosity::Quiet || dumb {
- return Progress { state: None }
+ return Progress { state: None };
}
Progress {
- state: cfg.shell().err_width().map(|n| {
- State {
- config: cfg,
- width: cmp::min(n, 80),
- first: true,
- last_update: Instant::now(),
- name: name.to_string(),
- done: false,
- }
+ state: cfg.shell().err_width().map(|n| State {
+ config: cfg,
+ width: cmp::min(n, 80),
+ first: true,
+ last_update: Instant::now(),
+ name: name.to_string(),
+ done: false,
}),
}
}
pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max),
- None => Ok(())
+ None => Ok(()),
}
}
}
impl<'cfg> State<'cfg> {
fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> {
if self.done {
- return Ok(())
+ return Ok(());
}
// Don't update too often as it can cause excessive performance loss
if self.first {
let delay = Duration::from_millis(500);
if self.last_update.elapsed() < delay {
- return Ok(())
+ return Ok(());
}
self.first = false;
} else {
let interval = Duration::from_millis(100);
if self.last_update.elapsed() < interval {
- return Ok(())
+ return Ok(());
}
}
self.last_update = Instant::now();
// Draw the `===>`
if hashes > 0 {
- for _ in 0..hashes-1 {
+ for _ in 0..hashes - 1 {
string.push_str("=");
}
if cur == max {
use std::io;
use std::mem;
use std::os::unix::prelude::*;
- use std::process::{ChildStdout, ChildStderr};
+ use std::process::{ChildStderr, ChildStdout};
use libc;
- pub fn read2(mut out_pipe: ChildStdout,
- mut err_pipe: ChildStderr,
- data: &mut FnMut(bool, &mut Vec<u8>, bool)) -> io::Result<()> {
+ pub fn read2(
+ mut out_pipe: ChildStdout,
+ mut err_pipe: ChildStderr,
+ data: &mut FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
unsafe {
libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
if r == -1 {
let err = io::Error::last_os_error();
if err.kind() == io::ErrorKind::Interrupted {
- continue
+ continue;
}
- return Err(err)
+ return Err(err);
}
// Read as much as we can from each pipe, ignoring EWOULDBLOCK or
// reader will return Ok(0), in which case we'll see `Ok` ourselves. In
// this case we flip the other fd back into blocking mode and read
// whatever's leftover on that file descriptor.
- let handle = |res: io::Result<_>| {
- match res {
- Ok(_) => Ok(true),
- Err(e) => {
- if e.kind() == io::ErrorKind::WouldBlock {
- Ok(false)
- } else {
- Err(e)
- }
+ let handle = |res: io::Result<_>| match res {
+ Ok(_) => Ok(true),
+ Err(e) => {
+ if e.kind() == io::ErrorKind::WouldBlock {
+ Ok(false)
+ } else {
+ Err(e)
}
}
};
use std::io;
use std::os::windows::prelude::*;
- use std::process::{ChildStdout, ChildStderr};
+ use std::process::{ChildStderr, ChildStdout};
use std::slice;
use self::miow::iocp::{CompletionPort, CompletionStatus};
done: bool,
}
- pub fn read2(out_pipe: ChildStdout,
- err_pipe: ChildStderr,
- data: &mut FnMut(bool, &mut Vec<u8>, bool)) -> io::Result<()> {
+ pub fn read2(
+ out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
let mut out = Vec::new();
let mut err = Vec::new();
if v.capacity() == v.len() {
v.reserve(1);
}
- slice::from_raw_parts_mut(v.as_mut_ptr().offset(v.len() as isize),
- v.capacity() - v.len())
+ slice::from_raw_parts_mut(
+ v.as_mut_ptr().offset(v.len() as isize),
+ v.capacity() - v.len(),
+ )
}
}
use std::path::PathBuf;
-use util::{self, CargoResult, internal, ProcessBuilder};
+use util::{self, internal, CargoResult, ProcessBuilder};
/// Information on the `rustc` executable
#[derive(Debug)]
let output = cmd.exec_with_output()?;
- let verbose_version = String::from_utf8(output.stdout).map_err(|_| {
- internal("rustc -v didn't return utf8 output")
- })?;
+ let verbose_version = String::from_utf8(output.stdout)
+ .map_err(|_| internal("rustc -v didn't return utf8 output"))?;
let host = {
- let triple = verbose_version.lines().find(|l| {
- l.starts_with("host: ")
- }).map(|l| &l[6..]).ok_or_else(|| internal("rustc -v didn't have a line for `host:`"))?;
+ let triple = verbose_version
+ .lines()
+ .find(|l| l.starts_with("host: "))
+ .map(|l| &l[6..])
+ .ok_or_else(|| internal("rustc -v didn't have a line for `host:`"))?;
triple.to_string()
};
extern crate crypto_hash;
-use self::crypto_hash::{Hasher,Algorithm};
+use self::crypto_hash::{Algorithm, Hasher};
use std::io::Write;
pub struct Sha256(Hasher);
}
impl ToSemver for Version {
- fn to_semver(self) -> CargoResult<Version> { Ok(self) }
+ fn to_semver(self) -> CargoResult<Version> {
+ Ok(self)
+ }
}
impl<'a> ToSemver for &'a str {
impl<'a> ToUrl for &'a str {
fn to_url(self) -> CargoResult<Url> {
- Url::parse(self).map_err(|s| {
- format_err!("invalid url `{}`: {}", self, s)
- })
+ Url::parse(self).map_err(|s| format_err!("invalid url `{}`: {}", self, s))
}
}
impl<'a> ToUrl for &'a Path {
fn to_url(self) -> CargoResult<Url> {
- Url::from_file_path(self).map_err(|()| {
- format_err!("invalid path url `{}`", self.display())
- })
+ Url::from_file_path(self).map_err(|()| format_err!("invalid path url `{}`", self.display()))
}
}
-use std::collections::{HashMap, BTreeMap, HashSet, BTreeSet};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt;
use std::fs;
use std::path::{Path, PathBuf};
use toml;
use url::Url;
-use core::{SourceId, Profiles, PackageIdSpec, GitReference, WorkspaceConfig, WorkspaceRootConfig};
-use core::{Summary, Manifest, Target, Dependency, PackageId};
-use core::{EitherManifest, Epoch, VirtualManifest, Features, Feature};
+use core::{GitReference, PackageIdSpec, Profiles, SourceId, WorkspaceConfig, WorkspaceRootConfig};
+use core::{Dependency, Manifest, PackageId, Summary, Target};
+use core::{EitherManifest, Epoch, Feature, Features, VirtualManifest};
use core::dependency::{Kind, Platform};
-use core::manifest::{LibKind, Profile, ManifestMetadata, Lto};
+use core::manifest::{LibKind, Lto, ManifestMetadata, Profile};
use sources::CRATES_IO;
use util::paths;
-use util::{self, ToUrl, Config};
+use util::{self, Config, ToUrl};
use util::errors::{CargoError, CargoResult, CargoResultExt};
mod targets;
use self::targets::targets;
-pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config)
- -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
- trace!("read_manifest; path={}; source-id={}", path.display(), source_id);
+pub fn read_manifest(
+ path: &Path,
+ source_id: &SourceId,
+ config: &Config,
+) -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
+ trace!(
+ "read_manifest; path={}; source-id={}",
+ path.display(),
+ source_id
+ );
let contents = paths::read(path)?;
- let ret = do_read_manifest(&contents, path, source_id, config).chain_err(|| {
- format!("failed to parse manifest at `{}`", path.display())
- })?;
+ let ret = do_read_manifest(&contents, path, source_id, config)
+ .chain_err(|| format!("failed to parse manifest at `{}`", path.display()))?;
Ok(ret)
}
-fn do_read_manifest(contents: &str,
- manifest_file: &Path,
- source_id: &SourceId,
- config: &Config)
- -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
+fn do_read_manifest(
+ contents: &str,
+ manifest_file: &Path,
+ source_id: &SourceId,
+ config: &Config,
+) -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
let package_root = manifest_file.parent().unwrap();
let toml = {
let manifest = Rc::new(manifest);
return if manifest.project.is_some() || manifest.package.is_some() {
- let (mut manifest, paths) = TomlManifest::to_real_manifest(&manifest,
- source_id,
- package_root,
- config)?;
+ let (mut manifest, paths) =
+ TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?;
for key in unused {
manifest.add_warning(format!("unused manifest key: {}", key));
}
if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
- bail!("no targets specified in the manifest\n \
- either src/lib.rs, src/main.rs, a [lib] section, or \
- [[bin]] section must be present")
+ bail!(
+ "no targets specified in the manifest\n \
+ either src/lib.rs, src/main.rs, a [lib] section, or \
+ [[bin]] section must be present"
+ )
}
Ok((EitherManifest::Real(manifest), paths))
} else {
- let (m, paths) = TomlManifest::to_virtual_manifest(&manifest,
- source_id,
- package_root,
- config)?;
+ let (m, paths) =
+ TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?;
Ok((EitherManifest::Virtual(m), paths))
};
}
dst.push_str(key);
}
- Path::Some { parent } |
- Path::NewtypeVariant { parent } |
- Path::NewtypeStruct { parent } => stringify(dst, parent),
+ Path::Some { parent }
+ | Path::NewtypeVariant { parent }
+ | Path::NewtypeStruct { parent } => stringify(dst, parent),
}
}
}
-pub fn parse(toml: &str,
- file: &Path,
- config: &Config) -> CargoResult<toml::Value> {
+pub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult<toml::Value> {
let first_error = match toml.parse() {
Ok(ret) => return Ok(ret),
Err(e) => e,
let mut second_parser = toml::de::Deserializer::new(toml);
second_parser.set_require_newline_after_table(false);
if let Ok(ret) = toml::Value::deserialize(&mut second_parser) {
- let msg = format!("\
+ let msg = format!(
+ "\
TOML file found which contains invalid syntax and will soon not parse
at `{}`.
The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is
invalid), but this file has a table header which does not have a newline after
it. A newline needs to be added and this warning will soon become a hard error
-in the future.", file.display());
+in the future.",
+ file.display()
+ );
config.shell().warn(&msg)?;
- return Ok(ret)
+ return Ok(ret);
}
let first_error = CargoError::from(first_error);
impl<'de> de::Deserialize<'de> for TomlDependency {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
struct TomlDependencyVisitor;
type Value = TomlDependency;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- formatter.write_str("a version string like \"0.9.8\" or a \
- detailed dependency like { version = \"0.9.8\" }")
+ formatter.write_str(
+ "a version string like \"0.9.8\" or a \
+ detailed dependency like { version = \"0.9.8\" }",
+ )
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
- where E: de::Error
+ where
+ E: de::Error,
{
Ok(TomlDependency::Simple(s.to_owned()))
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
- where V: de::MapAccess<'de>
+ where
+ V: de::MapAccess<'de>,
{
let mvd = de::value::MapAccessDeserializer::new(map);
DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed)
features: Option<Vec<String>>,
optional: Option<bool>,
default_features: Option<bool>,
- #[serde(rename = "default_features")]
- default_features2: Option<bool>,
+ #[serde(rename = "default_features")] default_features2: Option<bool>,
package: Option<String>,
}
impl<'de> de::Deserialize<'de> for TomlOptLevel {
fn deserialize<D>(d: D) -> Result<TomlOptLevel, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
struct Visitor;
}
fn visit_i64<E>(self, value: i64) -> Result<TomlOptLevel, E>
- where E: de::Error
+ where
+ E: de::Error,
{
Ok(TomlOptLevel(value.to_string()))
}
fn visit_str<E>(self, value: &str) -> Result<TomlOptLevel, E>
- where E: de::Error
+ where
+ E: de::Error,
{
if value == "s" || value == "z" {
Ok(TomlOptLevel(value.to_string()))
} else {
- Err(E::custom(format!("must be an integer, `z`, or `s`, \
- but found: {}", value)))
+ Err(E::custom(format!(
+ "must be an integer, `z`, or `s`, \
+ but found: {}",
+ value
+ )))
}
}
}
impl ser::Serialize for TomlOptLevel {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
match self.0.parse::<u32>() {
Ok(n) => n.serialize(serializer),
impl<'de> de::Deserialize<'de> for U32OrBool {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
struct Visitor;
}
fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
- where E: de::Error,
+ where
+ E: de::Error,
{
Ok(U32OrBool::Bool(b))
}
fn visit_i64<E>(self, u: i64) -> Result<Self::Value, E>
- where E: de::Error,
+ where
+ E: de::Error,
{
Ok(U32OrBool::U32(u as u32))
}
fn visit_u64<E>(self, u: u64) -> Result<Self::Value, E>
- where E: de::Error,
+ where
+ E: de::Error,
{
Ok(U32OrBool::U32(u as u32))
}
#[derive(Deserialize, Serialize, Clone, Debug, Default)]
pub struct TomlProfile {
- #[serde(rename = "opt-level")]
- opt_level: Option<TomlOptLevel>,
+ #[serde(rename = "opt-level")] opt_level: Option<TomlOptLevel>,
lto: Option<StringOrBool>,
- #[serde(rename = "codegen-units")]
- codegen_units: Option<u32>,
+ #[serde(rename = "codegen-units")] codegen_units: Option<u32>,
debug: Option<U32OrBool>,
- #[serde(rename = "debug-assertions")]
- debug_assertions: Option<bool>,
+ #[serde(rename = "debug-assertions")] debug_assertions: Option<bool>,
rpath: Option<bool>,
panic: Option<String>,
- #[serde(rename = "overflow-checks")]
- overflow_checks: Option<bool>,
+ #[serde(rename = "overflow-checks")] overflow_checks: Option<bool>,
incremental: Option<bool>,
}
impl<'de> de::Deserialize<'de> for StringOrBool {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
struct Visitor;
}
fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
- where E: de::Error,
+ where
+ E: de::Error,
{
Ok(StringOrBool::Bool(b))
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
- where E: de::Error,
+ where
+ E: de::Error,
{
Ok(StringOrBool::String(s.to_string()))
}
impl<'de> de::Deserialize<'de> for VecStringOrBool {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
struct Visitor;
}
fn visit_seq<V>(self, v: V) -> Result<Self::Value, V::Error>
- where V: de::SeqAccess<'de>
+ where
+ V: de::SeqAccess<'de>,
{
let seq = de::value::SeqAccessDeserializer::new(v);
Vec::deserialize(seq).map(VecStringOrBool::VecString)
}
fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
- where E: de::Error,
+ where
+ E: de::Error,
{
Ok(VecStringOrBool::Bool(b))
}
exclude: Option<Vec<String>>,
include: Option<Vec<String>>,
publish: Option<VecStringOrBool>,
- #[serde(rename = "publish-lockfile")]
- publish_lockfile: Option<bool>,
+ #[serde(rename = "publish-lockfile")] publish_lockfile: Option<bool>,
workspace: Option<String>,
- #[serde(rename = "im-a-teapot")]
- im_a_teapot: Option<bool>,
+ #[serde(rename = "im-a-teapot")] im_a_teapot: Option<bool>,
// package metadata
description: Option<String>,
keywords: Option<Vec<String>>,
categories: Option<Vec<String>>,
license: Option<String>,
- #[serde(rename = "license-file")]
- license_file: Option<String>,
+ #[serde(rename = "license-file")] license_file: Option<String>,
repository: Option<String>,
metadata: Option<toml::Value>,
rust: Option<String>,
#[derive(Debug, Deserialize, Serialize)]
pub struct TomlWorkspace {
members: Option<Vec<String>>,
- #[serde(rename = "default-members")]
- default_members: Option<Vec<String>>,
+ #[serde(rename = "default-members")] default_members: Option<Vec<String>>,
exclude: Option<Vec<String>>,
}
impl TomlManifest {
pub fn prepare_for_publish(&self, config: &Config) -> CargoResult<TomlManifest> {
- let mut package = self.package.as_ref()
- .or_else(|| self.project.as_ref())
- .unwrap()
- .clone();
+ let mut package = self.package
+ .as_ref()
+ .or_else(|| self.project.as_ref())
+ .unwrap()
+ .clone();
package.workspace = None;
return Ok(TomlManifest {
package: Some(package),
test: self.test.clone(),
bench: self.bench.clone(),
dependencies: map_deps(config, self.dependencies.as_ref())?,
- dev_dependencies: map_deps(config, self.dev_dependencies.as_ref()
- .or_else(|| self.dev_dependencies2.as_ref()))?,
+ dev_dependencies: map_deps(
+ config,
+ self.dev_dependencies
+ .as_ref()
+ .or_else(|| self.dev_dependencies2.as_ref()),
+ )?,
dev_dependencies2: None,
- build_dependencies: map_deps(config, self.build_dependencies.as_ref()
- .or_else(|| self.build_dependencies2.as_ref()))?,
+ build_dependencies: map_deps(
+ config,
+ self.build_dependencies
+ .as_ref()
+ .or_else(|| self.build_dependencies2.as_ref()),
+ )?,
build_dependencies2: None,
features: self.features.clone(),
target: match self.target.as_ref().map(|target_map| {
- target_map.iter().map(|(k, v)| {
- Ok((k.clone(), TomlPlatform {
- dependencies: map_deps(config, v.dependencies.as_ref())?,
- dev_dependencies: map_deps(config, v.dev_dependencies.as_ref()
- .or_else(|| v.dev_dependencies2.as_ref()))?,
- dev_dependencies2: None,
- build_dependencies: map_deps(config, v.build_dependencies.as_ref()
- .or_else(|| v.build_dependencies2.as_ref()))?,
- build_dependencies2: None,
- }))
- }).collect()
+ target_map
+ .iter()
+ .map(|(k, v)| {
+ Ok((
+ k.clone(),
+ TomlPlatform {
+ dependencies: map_deps(config, v.dependencies.as_ref())?,
+ dev_dependencies: map_deps(
+ config,
+ v.dev_dependencies
+ .as_ref()
+ .or_else(|| v.dev_dependencies2.as_ref()),
+ )?,
+ dev_dependencies2: None,
+ build_dependencies: map_deps(
+ config,
+ v.build_dependencies
+ .as_ref()
+ .or_else(|| v.build_dependencies2.as_ref()),
+ )?,
+ build_dependencies2: None,
+ },
+ ))
+ })
+ .collect()
}) {
Some(Ok(v)) => Some(v),
Some(Err(e)) => return Err(e),
cargo_features: self.cargo_features.clone(),
});
- fn map_deps(config: &Config, deps: Option<&BTreeMap<String, TomlDependency>>)
- -> CargoResult<Option<BTreeMap<String, TomlDependency>>>
- {
+ fn map_deps(
+ config: &Config,
+ deps: Option<&BTreeMap<String, TomlDependency>>,
+ ) -> CargoResult<Option<BTreeMap<String, TomlDependency>>> {
let deps = match deps {
Some(deps) => deps,
None => return Ok(None),
TomlDependency::Detailed(ref d) => {
let mut d = d.clone();
d.path.take(); // path dependencies become crates.io deps
- // registry specifications are elaborated to the index URL
+ // registry specifications are elaborated to the index URL
if let Some(registry) = d.registry.take() {
let src = SourceId::alt_registry(config, ®istry)?;
d.registry_index = Some(src.url().to_string());
}
}
- fn to_real_manifest(me: &Rc<TomlManifest>,
- source_id: &SourceId,
- package_root: &Path,
- config: &Config)
- -> CargoResult<(Manifest, Vec<PathBuf>)> {
+ fn to_real_manifest(
+ me: &Rc<TomlManifest>,
+ source_id: &SourceId,
+ package_root: &Path,
+ config: &Config,
+ ) -> CargoResult<(Manifest, Vec<PathBuf>)> {
let mut nested_paths = vec![];
let mut warnings = vec![];
let mut errors = vec![];
let features = Features::new(&cargo_features, &mut warnings)?;
let project = me.project.as_ref().or_else(|| me.package.as_ref());
- let project = project.ok_or_else(|| {
- format_err!("no `package` section found")
- })?;
+ let project = project.ok_or_else(|| format_err!("no `package` section found"))?;
let package_name = project.name.trim();
if package_name.is_empty() {
// If we have no lib at all, use the inferred lib if available
// If we have a lib with a path, we're done
// If we have a lib with no path, use the inferred lib or_else package name
- let targets = targets(me, package_name, package_root, &project.build,
- &mut warnings, &mut errors)?;
+ let targets = targets(
+ me,
+ package_name,
+ package_root,
+ &project.build,
+ &mut warnings,
+ &mut errors,
+ )?;
if targets.is_empty() {
debug!("manifest has no build targets");
}
if let Err(e) = unique_build_targets(&targets, package_root) {
- warnings.push(format!("file found to be present in multiple \
- build targets: {}", e));
+ warnings.push(format!(
+ "file found to be present in multiple \
+ build targets: {}",
+ e
+ ));
}
let mut deps = Vec::new();
let patch;
{
-
let mut cx = Context {
pkgid: Some(&pkgid),
deps: &mut deps,
fn process_dependencies(
cx: &mut Context,
new_deps: Option<&BTreeMap<String, TomlDependency>>,
- kind: Option<Kind>)
- -> CargoResult<()>
- {
+ kind: Option<Kind>,
+ ) -> CargoResult<()> {
let dependencies = match new_deps {
Some(dependencies) => dependencies,
- None => return Ok(())
+ None => return Ok(()),
};
for (n, v) in dependencies.iter() {
let dep = v.to_dependency(n, cx, kind)?;
}
// Collect the deps
- process_dependencies(&mut cx, me.dependencies.as_ref(),
- None)?;
- let dev_deps = me.dev_dependencies.as_ref()
- .or_else(|| me.dev_dependencies2.as_ref());
+ process_dependencies(&mut cx, me.dependencies.as_ref(), None)?;
+ let dev_deps = me.dev_dependencies
+ .as_ref()
+ .or_else(|| me.dev_dependencies2.as_ref());
process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?;
- let build_deps = me.build_dependencies.as_ref()
- .or_else(|| me.build_dependencies2.as_ref());
+ let build_deps = me.build_dependencies
+ .as_ref()
+ .or_else(|| me.build_dependencies2.as_ref());
process_dependencies(&mut cx, build_deps, Some(Kind::Build))?;
for (name, platform) in me.target.iter().flat_map(|t| t) {
cx.platform = Some(name.parse()?);
- process_dependencies(&mut cx, platform.dependencies.as_ref(),
- None)?;
- let build_deps = platform.build_dependencies.as_ref()
- .or_else(|| platform.build_dependencies2.as_ref());
+ process_dependencies(&mut cx, platform.dependencies.as_ref(), None)?;
+ let build_deps = platform
+ .build_dependencies
+ .as_ref()
+ .or_else(|| platform.build_dependencies2.as_ref());
process_dependencies(&mut cx, build_deps, Some(Kind::Build))?;
- let dev_deps = platform.dev_dependencies.as_ref()
- .or_else(|| platform.dev_dependencies2.as_ref());
+ let dev_deps = platform
+ .dev_dependencies
+ .as_ref()
+ .or_else(|| platform.dev_dependencies2.as_ref());
process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?;
}
let name = dep.name();
let prev = names_sources.insert(name, dep.source_id());
if prev.is_some() && prev != Some(dep.source_id()) {
- bail!("Dependency '{}' has different source paths depending on the build \
- target. Each dependency must have a single canonical source path \
- irrespective of build target.", name);
+ bail!(
+ "Dependency '{}' has different source paths depending on the build \
+ target. Each dependency must have a single canonical source path \
+ irrespective of build target.",
+ name
+ );
}
}
}
let exclude = project.exclude.clone().unwrap_or_default();
let include = project.include.clone().unwrap_or_default();
- let summary = Summary::new(pkgid, deps, me.features.clone()
- .unwrap_or_else(BTreeMap::new), project.links.clone())?;
+ let summary = Summary::new(
+ pkgid,
+ deps,
+ me.features.clone().unwrap_or_else(BTreeMap::new),
+ project.links.clone(),
+ )?;
let metadata = ManifestMetadata {
description: project.description.clone(),
homepage: project.homepage.clone(),
links: project.links.clone(),
};
- let workspace_config = match (me.workspace.as_ref(),
- project.workspace.as_ref()) {
- (Some(config), None) => {
- WorkspaceConfig::Root(
- WorkspaceRootConfig::new(
- &package_root, &config.members, &config.default_members, &config.exclude,
- )
- )
- }
- (None, root) => {
- WorkspaceConfig::Member { root: root.cloned() }
- }
- (Some(..), Some(..)) => {
- bail!("cannot configure both `package.workspace` and \
- `[workspace]`, only one can be specified")
- }
+ let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) {
+ (Some(config), None) => WorkspaceConfig::Root(WorkspaceRootConfig::new(
+ &package_root,
+ &config.members,
+ &config.default_members,
+ &config.exclude,
+ )),
+ (None, root) => WorkspaceConfig::Member {
+ root: root.cloned(),
+ },
+ (Some(..), Some(..)) => bail!(
+ "cannot configure both `package.workspace` and \
+ `[workspace]`, only one can be specified"
+ ),
};
let profiles = build_profiles(&me.profile);
let publish = match project.publish {
Some(VecStringOrBool::VecString(ref vecstring)) => {
- features.require(Feature::alternative_registries()).chain_err(|| {
- "the `publish` manifest key is unstable for anything other than a value of true or false"
- })?;
+ features
+ .require(Feature::alternative_registries())
+ .chain_err(|| {
+ "the `publish` manifest key is unstable for anything other than a value of true or false"
+ })?;
Some(vecstring.clone())
- },
+ }
Some(VecStringOrBool::Bool(false)) => Some(vec![]),
None | Some(VecStringOrBool::Bool(true)) => None,
};
};
let epoch = if let Some(ref epoch) = project.rust {
- features.require(Feature::epoch()).chain_err(|| {
- "epoches are unstable"
- })?;
+ features
+ .require(Feature::epoch())
+ .chain_err(|| "epoches are unstable")?;
if let Ok(epoch) = epoch.parse() {
epoch
} else {
bail!("the `rust` key must be one of: `2015`, `2018`")
}
} else {
- Epoch::Epoch2015
+ Epoch::Epoch2015
};
- let mut manifest = Manifest::new(summary,
- targets,
- exclude,
- include,
- project.links.clone(),
- metadata,
- profiles,
- publish,
- publish_lockfile,
- replace,
- patch,
- workspace_config,
- features,
- epoch,
- project.im_a_teapot,
- Rc::clone(me));
+ let mut manifest = Manifest::new(
+ summary,
+ targets,
+ exclude,
+ include,
+ project.links.clone(),
+ metadata,
+ profiles,
+ publish,
+ publish_lockfile,
+ replace,
+ patch,
+ workspace_config,
+ features,
+ epoch,
+ project.im_a_teapot,
+ Rc::clone(me),
+ );
if project.license_file.is_some() && project.license.is_some() {
- manifest.add_warning("only one of `license` or \
- `license-file` is necessary".to_string());
+ manifest.add_warning(
+ "only one of `license` or \
+ `license-file` is necessary"
+ .to_string(),
+ );
}
for warning in warnings {
manifest.add_warning(warning);
Ok((manifest, nested_paths))
}
- fn to_virtual_manifest(me: &Rc<TomlManifest>,
- source_id: &SourceId,
- root: &Path,
- config: &Config)
- -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
+ fn to_virtual_manifest(
+ me: &Rc<TomlManifest>,
+ source_id: &SourceId,
+ root: &Path,
+ config: &Config,
+ ) -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
if me.project.is_some() {
bail!("virtual manifests do not define [project]");
}
warnings: &mut warnings,
platform: None,
features: &features,
- root
+ root,
};
(me.replace(&mut cx)?, me.patch(&mut cx)?)
};
let profiles = build_profiles(&me.profile);
let workspace_config = match me.workspace {
- Some(ref config) => {
- WorkspaceConfig::Root(
- WorkspaceRootConfig::new(
- &root, &config.members, &config.default_members, &config.exclude,
- )
- )
- }
+ Some(ref config) => WorkspaceConfig::Root(WorkspaceRootConfig::new(
+ &root,
+ &config.members,
+ &config.default_members,
+ &config.exclude,
+ )),
None => {
bail!("virtual manifests must be configured with [workspace]");
}
};
- Ok((VirtualManifest::new(replace, patch, workspace_config, profiles), nested_paths))
+ Ok((
+ VirtualManifest::new(replace, patch, workspace_config, profiles),
+ nested_paths,
+ ))
}
- fn replace(&self, cx: &mut Context)
- -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
+ fn replace(&self, cx: &mut Context) -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
if self.patch.is_some() && self.replace.is_some() {
bail!("cannot specify both [replace] and [patch]");
}
let mut replace = Vec::new();
for (spec, replacement) in self.replace.iter().flat_map(|x| x) {
let mut spec = PackageIdSpec::parse(spec).chain_err(|| {
- format!("replacements must specify a valid semver \
- version to replace, but `{}` does not",
- spec)
+ format!(
+ "replacements must specify a valid semver \
+ version to replace, but `{}` does not",
+ spec
+ )
})?;
if spec.url().is_none() {
spec.set_url(CRATES_IO.parse().unwrap());
TomlDependency::Simple(..) => true,
};
if version_specified {
- bail!("replacements cannot specify a version \
- requirement, but found one for `{}`", spec);
+ bail!(
+ "replacements cannot specify a version \
+ requirement, but found one for `{}`",
+ spec
+ );
}
let mut dep = replacement.to_dependency(spec.name(), cx, None)?;
{
let version = spec.version().ok_or_else(|| {
- format_err!("replacements must specify a version \
- to replace, but `{}` does not",
- spec)
+ format_err!(
+ "replacements must specify a version \
+ to replace, but `{}` does not",
+ spec
+ )
})?;
dep.set_version_req(VersionReq::exact(version));
}
Ok(replace)
}
- fn patch(&self, cx: &mut Context)
- -> CargoResult<HashMap<Url, Vec<Dependency>>> {
+ fn patch(&self, cx: &mut Context) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
let mut patch = HashMap::new();
for (url, deps) in self.patch.iter().flat_map(|x| x) {
let url = match &url[..] {
"crates-io" => CRATES_IO.parse().unwrap(),
_ => url.to_url()?,
};
- patch.insert(url, deps.iter().map(|(name, dep)| {
- dep.to_dependency(name, cx, None)
- }).collect::<CargoResult<Vec<_>>>()?);
+ patch.insert(
+ url,
+ deps.iter()
+ .map(|(name, dep)| dep.to_dependency(name, cx, None))
+ .collect::<CargoResult<Vec<_>>>()?,
+ );
}
Ok(patch)
}
- fn maybe_custom_build(&self,
- build: &Option<StringOrBool>,
- package_root: &Path)
- -> Option<PathBuf> {
+ fn maybe_custom_build(
+ &self,
+ build: &Option<StringOrBool>,
+ package_root: &Path,
+ ) -> Option<PathBuf> {
let build_rs = package_root.join("build.rs");
match *build {
- Some(StringOrBool::Bool(false)) => None, // explicitly no build script
+ Some(StringOrBool::Bool(false)) => None, // explicitly no build script
Some(StringOrBool::Bool(true)) => Some(build_rs.into()),
Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)),
None => {
}
impl TomlDependency {
- fn to_dependency(&self,
- name: &str,
- cx: &mut Context,
- kind: Option<Kind>)
- -> CargoResult<Dependency> {
+ fn to_dependency(
+ &self,
+ name: &str,
+ cx: &mut Context,
+ kind: Option<Kind>,
+ ) -> CargoResult<Dependency> {
match *self {
- TomlDependency::Simple(ref version) => {
- DetailedTomlDependency {
- version: Some(version.clone()),
- ..Default::default()
- }.to_dependency(name, cx, kind)
- }
- TomlDependency::Detailed(ref details) => {
- details.to_dependency(name, cx, kind)
- }
+ TomlDependency::Simple(ref version) => DetailedTomlDependency {
+ version: Some(version.clone()),
+ ..Default::default()
+ }.to_dependency(name, cx, kind),
+ TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind),
}
}
}
impl DetailedTomlDependency {
- fn to_dependency(&self,
- name: &str,
- cx: &mut Context,
- kind: Option<Kind>)
- -> CargoResult<Dependency> {
- if self.version.is_none() && self.path.is_none() &&
- self.git.is_none() {
- let msg = format!("dependency ({}) specified without \
- providing a local path, Git repository, or \
- version to use. This will be considered an \
- error in future versions", name);
+ fn to_dependency(
+ &self,
+ name: &str,
+ cx: &mut Context,
+ kind: Option<Kind>,
+ ) -> CargoResult<Dependency> {
+ if self.version.is_none() && self.path.is_none() && self.git.is_none() {
+ let msg = format!(
+ "dependency ({}) specified without \
+ providing a local path, Git repository, or \
+ version to use. This will be considered an \
+ error in future versions",
+ name
+ );
cx.warnings.push(msg);
}
let git_only_keys = [
(&self.branch, "branch"),
(&self.tag, "tag"),
- (&self.rev, "rev")
+ (&self.rev, "rev"),
];
for &(key, key_name) in &git_only_keys {
if key.is_some() {
- let msg = format!("key `{}` is ignored for dependency ({}). \
- This will be considered an error in future versions",
- key_name, name);
+ let msg = format!(
+ "key `{}` is ignored for dependency ({}). \
+ This will be considered an error in future versions",
+ key_name, name
+ );
cx.warnings.push(msg)
}
}
cx.features.require(Feature::alternative_registries())?;
SourceId::alt_registry(cx.config, registry)?
}
- None => SourceId::crates_io(cx.config)?
+ None => SourceId::crates_io(cx.config)?,
};
let new_source_id = match (
self.registry.as_ref(),
self.registry_index.as_ref(),
) {
- (Some(_), _, Some(_), _) |
- (Some(_), _, _, Some(_))=> bail!("dependency ({}) specification is ambiguous. \
- Only one of `git` or `registry` is allowed.", name),
- (_, _, Some(_), Some(_)) => bail!("dependency ({}) specification is ambiguous. \
- Only one of `registry` or `registry-index` is allowed.", name),
+ (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `git` or `registry` is allowed.",
+ name
+ ),
+ (_, _, Some(_), Some(_)) => bail!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `registry` or `registry-index` is allowed.",
+ name
+ ),
(Some(git), maybe_path, _, _) => {
if maybe_path.is_some() {
- let msg = format!("dependency ({}) specification is ambiguous. \
- Only one of `git` or `path` is allowed. \
- This will be considered an error in future versions", name);
+ let msg = format!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `git` or `path` is allowed. \
+ This will be considered an error in future versions",
+ name
+ );
cx.warnings.push(msg)
}
.count();
if n_details > 1 {
- let msg = format!("dependency ({}) specification is ambiguous. \
- Only one of `branch`, `tag` or `rev` is allowed. \
- This will be considered an error in future versions", name);
+ let msg = format!(
+ "dependency ({}) specification is ambiguous. \
+ Only one of `branch`, `tag` or `rev` is allowed. \
+ This will be considered an error in future versions",
+ name
+ );
cx.warnings.push(msg)
}
- let reference = self.branch.clone().map(GitReference::Branch)
+ let reference = self.branch
+ .clone()
+ .map(GitReference::Branch)
.or_else(|| self.tag.clone().map(GitReference::Tag))
.or_else(|| self.rev.clone().map(GitReference::Rev))
.unwrap_or_else(|| GitReference::Branch("master".to_string()));
let loc = git.to_url()?;
SourceId::for_git(&loc, reference)?
- },
+ }
(None, Some(path), _, _) => {
cx.nested_paths.push(PathBuf::from(path));
// If the source id for the package we're parsing is a path
} else {
cx.source_id.clone()
}
- },
+ }
(None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
(None, None, None, Some(registry_index)) => {
let url = registry_index.to_url()?;
let version = self.version.as_ref().map(|v| &v[..]);
let mut dep = match cx.pkgid {
- Some(id) => {
- Dependency::parse(pkg_name, version, &new_source_id,
- id, cx.config)?
- }
+ Some(id) => Dependency::parse(pkg_name, version, &new_source_id, id, cx.config)?,
None => Dependency::parse_no_deprecated(name, version, &new_source_id)?,
};
dep.set_features(self.features.clone().unwrap_or_default())
- .set_default_features(self.default_features
- .or(self.default_features2)
- .unwrap_or(true))
- .set_optional(self.optional.unwrap_or(false))
- .set_platform(cx.platform.clone())
- .set_registry_id(®istry_id);
+ .set_default_features(
+ self.default_features
+ .or(self.default_features2)
+ .unwrap_or(true),
+ )
+ .set_optional(self.optional.unwrap_or(false))
+ .set_platform(cx.platform.clone())
+ .set_registry_id(®istry_id);
if let Some(kind) = kind {
dep.set_kind(kind);
}
// The intention was to only accept `crate-type` here but historical
// versions of Cargo also accepted `crate_type`, so look for both.
- #[serde(rename = "crate-type")]
- crate_type: Option<Vec<String>>,
- #[serde(rename = "crate_type")]
- crate_type2: Option<Vec<String>>,
+ #[serde(rename = "crate-type")] crate_type: Option<Vec<String>>,
+ #[serde(rename = "crate_type")] crate_type2: Option<Vec<String>>,
path: Option<PathValue>,
test: Option<bool>,
bench: Option<bool>,
doc: Option<bool>,
plugin: Option<bool>,
- #[serde(rename = "proc-macro")]
- proc_macro: Option<bool>,
- #[serde(rename = "proc_macro")]
- proc_macro2: Option<bool>,
+ #[serde(rename = "proc-macro")] proc_macro: Option<bool>,
+ #[serde(rename = "proc_macro")] proc_macro2: Option<bool>,
harness: Option<bool>,
- #[serde(rename = "required-features")]
- required_features: Option<Vec<String>>,
+ #[serde(rename = "required-features")] required_features: Option<Vec<String>>,
}
#[derive(Clone)]
impl<'de> de::Deserialize<'de> for PathValue {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: de::Deserializer<'de>
+ where
+ D: de::Deserializer<'de>,
{
Ok(PathValue(String::deserialize(deserializer)?.into()))
}
impl ser::Serialize for PathValue {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where S: ser::Serializer,
+ where
+ S: ser::Serializer,
{
self.0.serialize(serializer)
}
fn name(&self) -> String {
match self.name {
Some(ref name) => name.clone(),
- None => panic!("target name is required")
+ None => panic!("target name is required"),
}
}
}
fn crate_types(&self) -> Option<&Vec<String>> {
- self.crate_type.as_ref().or_else(|| self.crate_type2.as_ref())
+ self.crate_type
+ .as_ref()
+ .or_else(|| self.crate_type2.as_ref())
}
}
fn build_profiles(profiles: &Option<TomlProfiles>) -> Profiles {
let profiles = profiles.as_ref();
let mut profiles = Profiles {
- release: merge(Profile::default_release(),
- profiles.and_then(|p| p.release.as_ref())),
- dev: merge(Profile::default_dev(),
- profiles.and_then(|p| p.dev.as_ref())),
- test: merge(Profile::default_test(),
- profiles.and_then(|p| p.test.as_ref())),
- test_deps: merge(Profile::default_dev(),
- profiles.and_then(|p| p.dev.as_ref())),
- bench: merge(Profile::default_bench(),
- profiles.and_then(|p| p.bench.as_ref())),
- bench_deps: merge(Profile::default_release(),
- profiles.and_then(|p| p.release.as_ref())),
- doc: merge(Profile::default_doc(),
- profiles.and_then(|p| p.doc.as_ref())),
+ release: merge(
+ Profile::default_release(),
+ profiles.and_then(|p| p.release.as_ref()),
+ ),
+ dev: merge(
+ Profile::default_dev(),
+ profiles.and_then(|p| p.dev.as_ref()),
+ ),
+ test: merge(
+ Profile::default_test(),
+ profiles.and_then(|p| p.test.as_ref()),
+ ),
+ test_deps: merge(
+ Profile::default_dev(),
+ profiles.and_then(|p| p.dev.as_ref()),
+ ),
+ bench: merge(
+ Profile::default_bench(),
+ profiles.and_then(|p| p.bench.as_ref()),
+ ),
+ bench_deps: merge(
+ Profile::default_release(),
+ profiles.and_then(|p| p.release.as_ref()),
+ ),
+ doc: merge(
+ Profile::default_doc(),
+ profiles.and_then(|p| p.doc.as_ref()),
+ ),
custom_build: Profile::default_custom_build(),
- check: merge(Profile::default_check(),
- profiles.and_then(|p| p.dev.as_ref())),
- check_test: merge(Profile::default_check_test(),
- profiles.and_then(|p| p.dev.as_ref())),
+ check: merge(
+ Profile::default_check(),
+ profiles.and_then(|p| p.dev.as_ref()),
+ ),
+ check_test: merge(
+ Profile::default_check_test(),
+ profiles.and_then(|p| p.dev.as_ref()),
+ ),
doctest: Profile::default_doctest(),
};
// The test/bench targets cannot have panic=abort because they'll all get
fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile {
let &TomlProfile {
- ref opt_level, ref lto, codegen_units, ref debug, debug_assertions, rpath,
- ref panic, ref overflow_checks, ref incremental,
+ ref opt_level,
+ ref lto,
+ codegen_units,
+ ref debug,
+ debug_assertions,
+ rpath,
+ ref panic,
+ ref overflow_checks,
+ ref incremental,
} = match toml {
Some(toml) => toml,
None => return profile,
None => None,
};
Profile {
- opt_level: opt_level.clone().unwrap_or(TomlOptLevel(profile.opt_level)).0,
+ opt_level: opt_level
+ .clone()
+ .unwrap_or(TomlOptLevel(profile.opt_level))
+ .0,
lto: match *lto {
Some(StringOrBool::Bool(b)) => Lto::Bool(b),
Some(StringOrBool::String(ref n)) => Lto::Named(n.clone()),
use core::Target;
use ops::is_bad_artifact_name;
use util::errors::CargoResult;
-use super::{TomlTarget, LibKind, PathValue, TomlManifest, StringOrBool,
- TomlLibTarget, TomlBinTarget, TomlBenchTarget, TomlExampleTarget, TomlTestTarget};
-
-
-pub fn targets(manifest: &TomlManifest,
- package_name: &str,
- package_root: &Path,
- custom_build: &Option<StringOrBool>,
- warnings: &mut Vec<String>,
- errors: &mut Vec<String>)
- -> CargoResult<Vec<Target>> {
+use super::{LibKind, PathValue, StringOrBool, TomlBenchTarget, TomlBinTarget, TomlExampleTarget,
+ TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget};
+
+pub fn targets(
+ manifest: &TomlManifest,
+ package_name: &str,
+ package_root: &Path,
+ custom_build: &Option<StringOrBool>,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
let mut targets = Vec::new();
let has_lib;
has_lib = false;
}
- targets.extend(
- clean_bins(manifest.bin.as_ref(), package_root, package_name, warnings, has_lib)?
- );
-
- targets.extend(
- clean_examples(manifest.example.as_ref(), package_root, errors)?
- );
-
- targets.extend(
- clean_tests(manifest.test.as_ref(), package_root, errors)?
- );
-
- targets.extend(
- clean_benches(manifest.bench.as_ref(), package_root, warnings, errors)?
- );
+ targets.extend(clean_bins(
+ manifest.bin.as_ref(),
+ package_root,
+ package_name,
+ warnings,
+ has_lib,
+ )?);
+
+ targets.extend(clean_examples(
+ manifest.example.as_ref(),
+ package_root,
+ errors,
+ )?);
+
+ targets.extend(clean_tests(manifest.test.as_ref(), package_root, errors)?);
+
+ targets.extend(clean_benches(
+ manifest.bench.as_ref(),
+ package_root,
+ warnings,
+ errors,
+ )?);
// processing the custom build script
if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) {
- let name = format!("build-script-{}",
- custom_build.file_stem().and_then(|s| s.to_str()).unwrap_or(""));
- targets.push(Target::custom_build_target(&name, package_root.join(custom_build)));
+ let name = format!(
+ "build-script-{}",
+ custom_build
+ .file_stem()
+ .and_then(|s| s.to_str())
+ .unwrap_or("")
+ );
+ targets.push(Target::custom_build_target(
+ &name,
+ package_root.join(custom_build),
+ ));
}
Ok(targets)
}
-
-fn clean_lib(toml_lib: Option<&TomlLibTarget>,
- package_root: &Path,
- package_name: &str,
- warnings: &mut Vec<String>) -> CargoResult<Option<Target>> {
+fn clean_lib(
+ toml_lib: Option<&TomlLibTarget>,
+ package_root: &Path,
+ package_name: &str,
+ warnings: &mut Vec<String>,
+) -> CargoResult<Option<Target>> {
let inferred = inferred_lib(package_root);
let lib = match toml_lib {
Some(lib) => {
..lib.clone()
})
}
- None => inferred.as_ref().map(|lib| {
- TomlTarget {
- name: Some(package_name.to_string()),
- path: Some(PathValue(lib.clone())),
- ..TomlTarget::new()
- }
- })
+ None => inferred.as_ref().map(|lib| TomlTarget {
+ name: Some(package_name.to_string()),
+ path: Some(PathValue(lib.clone())),
+ ..TomlTarget::new()
+ }),
};
let lib = match lib {
Some(ref lib) => lib,
- None => return Ok(None)
+ None => return Ok(None),
};
validate_has_name(lib, "library", "lib")?;
warnings.push(format!(
"path `{}` was erroneously implicitly accepted for library `{}`,\n\
please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
- legacy_path.display(), lib.name()
+ legacy_path.display(),
+ lib.name()
));
legacy_path
} else {
- bail!("can't find library `{}`, \
- rename file to `src/lib.rs` or specify lib.path", lib.name())
+ bail!(
+ "can't find library `{}`, \
+ rename file to `src/lib.rs` or specify lib.path",
+ lib.name()
+ )
}
}
};
Ok(Some(target))
}
-fn clean_bins(toml_bins: Option<&Vec<TomlBinTarget>>,
- package_root: &Path,
- package_name: &str,
- warnings: &mut Vec<String>,
- has_lib: bool) -> CargoResult<Vec<Target>> {
+fn clean_bins(
+ toml_bins: Option<&Vec<TomlBinTarget>>,
+ package_root: &Path,
+ package_name: &str,
+ warnings: &mut Vec<String>,
+ has_lib: bool,
+) -> CargoResult<Vec<Target>> {
let inferred = inferred_bins(package_root, package_name);
let bins = match toml_bins {
Some(bins) => bins.clone(),
- None => inferred.iter().map(|&(ref name, ref path)| {
- TomlTarget {
+ None => inferred
+ .iter()
+ .map(|&(ref name, ref path)| TomlTarget {
name: Some(name.clone()),
path: Some(PathValue(path.clone())),
..TomlTarget::new()
- }
- }).collect()
+ })
+ .collect(),
};
for bin in &bins {
warnings.push(format!(
"path `{}` was erroneously implicitly accepted for binary `{}`,\n\
please set bin.path in Cargo.toml",
- legacy_path.display(), bin.name()
+ legacy_path.display(),
+ bin.name()
));
Some(legacy_path)
} else {
Err(e) => bail!("{}", e),
};
- let mut target = Target::bin_target(&bin.name(), path,
- bin.required_features.clone());
+ let mut target = Target::bin_target(&bin.name(), path, bin.required_features.clone());
configure(bin, &mut target);
result.push(target);
}
}
}
-fn clean_examples(toml_examples: Option<&Vec<TomlExampleTarget>>,
- package_root: &Path,
- errors: &mut Vec<String>)
- -> CargoResult<Vec<Target>> {
-
+fn clean_examples(
+ toml_examples: Option<&Vec<TomlExampleTarget>>,
+ package_root: &Path,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
let inferred = infer_from_directory(&package_root.join("examples"));
- let targets = clean_targets("example", "example",
- toml_examples, &inferred,
- package_root, errors)?;
+ let targets = clean_targets(
+ "example",
+ "example",
+ toml_examples,
+ &inferred,
+ package_root,
+ errors,
+ )?;
let mut result = Vec::new();
for (path, toml) in targets {
let crate_types = match toml.crate_types() {
Some(kinds) => kinds.iter().map(|s| LibKind::from_str(s)).collect(),
- None => Vec::new()
+ None => Vec::new(),
};
- let mut target = Target::example_target(&toml.name(), crate_types, path,
- toml.required_features.clone());
+ let mut target = Target::example_target(
+ &toml.name(),
+ crate_types,
+ path,
+ toml.required_features.clone(),
+ );
configure(&toml, &mut target);
result.push(target);
}
Ok(result)
}
-fn clean_tests(toml_tests: Option<&Vec<TomlTestTarget>>,
- package_root: &Path,
- errors: &mut Vec<String>) -> CargoResult<Vec<Target>> {
-
+fn clean_tests(
+ toml_tests: Option<&Vec<TomlTestTarget>>,
+ package_root: &Path,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
let inferred = infer_from_directory(&package_root.join("tests"));
- let targets = clean_targets("test", "test",
- toml_tests, &inferred,
- package_root, errors)?;
+ let targets = clean_targets("test", "test", toml_tests, &inferred, package_root, errors)?;
let mut result = Vec::new();
for (path, toml) in targets {
- let mut target = Target::test_target(&toml.name(), path,
- toml.required_features.clone());
+ let mut target = Target::test_target(&toml.name(), path, toml.required_features.clone());
configure(&toml, &mut target);
result.push(target);
}
Ok(result)
}
-fn clean_benches(toml_benches: Option<&Vec<TomlBenchTarget>>,
- package_root: &Path,
- warnings: &mut Vec<String>,
- errors: &mut Vec<String>) -> CargoResult<Vec<Target>> {
+fn clean_benches(
+ toml_benches: Option<&Vec<TomlBenchTarget>>,
+ package_root: &Path,
+ warnings: &mut Vec<String>,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
let mut legacy_bench_path = |bench: &TomlTarget| {
let legacy_path = package_root.join("src").join("bench.rs");
if !(bench.name() == "bench" && legacy_path.exists()) {
warnings.push(format!(
"path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\
please set bench.path in Cargo.toml",
- legacy_path.display(), bench.name()
+ legacy_path.display(),
+ bench.name()
));
Some(legacy_path)
};
let inferred = infer_from_directory(&package_root.join("benches"));
- let targets = clean_targets_with_legacy_path("benchmark", "bench",
- toml_benches, &inferred,
- package_root,
- errors,
- &mut legacy_bench_path)?;
+ let targets = clean_targets_with_legacy_path(
+ "benchmark",
+ "bench",
+ toml_benches,
+ &inferred,
+ package_root,
+ errors,
+ &mut legacy_bench_path,
+ )?;
let mut result = Vec::new();
for (path, toml) in targets {
- let mut target = Target::bench_target(&toml.name(), path,
- toml.required_features.clone());
+ let mut target = Target::bench_target(&toml.name(), path, toml.required_features.clone());
configure(&toml, &mut target);
result.push(target);
}
Ok(result)
}
-fn clean_targets(target_kind_human: &str, target_kind: &str,
- toml_targets: Option<&Vec<TomlTarget>>,
- inferred: &[(String, PathBuf)],
- package_root: &Path,
- errors: &mut Vec<String>)
- -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
- clean_targets_with_legacy_path(target_kind_human, target_kind,
- toml_targets,
- inferred,
- package_root,
- errors,
- &mut |_| None)
+fn clean_targets(
+ target_kind_human: &str,
+ target_kind: &str,
+ toml_targets: Option<&Vec<TomlTarget>>,
+ inferred: &[(String, PathBuf)],
+ package_root: &Path,
+ errors: &mut Vec<String>,
+) -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+ clean_targets_with_legacy_path(
+ target_kind_human,
+ target_kind,
+ toml_targets,
+ inferred,
+ package_root,
+ errors,
+ &mut |_| None,
+ )
}
-fn clean_targets_with_legacy_path(target_kind_human: &str, target_kind: &str,
- toml_targets: Option<&Vec<TomlTarget>>,
- inferred: &[(String, PathBuf)],
- package_root: &Path,
- errors: &mut Vec<String>,
- legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>)
- -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+fn clean_targets_with_legacy_path(
+ target_kind_human: &str,
+ target_kind: &str,
+ toml_targets: Option<&Vec<TomlTarget>>,
+ inferred: &[(String, PathBuf)],
+ package_root: &Path,
+ errors: &mut Vec<String>,
+ legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>,
+) -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
let toml_targets = match toml_targets {
Some(targets) => targets.clone(),
- None => inferred.iter().map(|&(ref name, ref path)| {
- TomlTarget {
+ None => inferred
+ .iter()
+ .map(|&(ref name, ref path)| TomlTarget {
name: Some(name.clone()),
path: Some(PathValue(path.clone())),
..TomlTarget::new()
- }
- }).collect()
+ })
+ .collect(),
};
for target in &toml_targets {
Ok(path) => path,
Err(e) => {
errors.push(e);
- continue
- },
+ continue;
+ }
};
result.push((path, target));
}
Ok(result)
}
-
fn inferred_lib(package_root: &Path) -> Option<PathBuf> {
let lib = package_root.join("src").join("lib.rs");
if fs::metadata(&lib).is_ok() {
fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> {
let entries = match fs::read_dir(directory) {
Err(_) => return Vec::new(),
- Ok(dir) => dir
+ Ok(dir) => dir,
};
entries
.collect()
}
-
fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> {
if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") {
infer_file(entry)
}
}
-
fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> {
let path = entry.path();
- path
- .file_stem()
+ path.file_stem()
.and_then(|p| p.to_str())
.map(|p| (p.to_owned(), path.clone()))
}
-
fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> {
let path = entry.path();
let main = path.join("main.rs");
let name = path.file_name().and_then(|n| n.to_str());
match (name, main.exists()) {
(Some(name), true) => Some((name.to_owned(), main)),
- _ => None
+ _ => None,
}
}
-
fn is_not_dotfile(entry: &DirEntry) -> bool {
entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false)
}
-
-fn validate_has_name(target: &TomlTarget,
- target_kind_human: &str,
- target_kind: &str) -> CargoResult<()> {
+fn validate_has_name(
+ target: &TomlTarget,
+ target_kind_human: &str,
+ target_kind: &str,
+) -> CargoResult<()> {
match target.name {
Some(ref name) => if name.trim().is_empty() {
bail!("{} target names cannot be empty", target_kind_human)
},
- None => bail!("{} target {}.name is required", target_kind_human, target_kind)
+ None => bail!(
+ "{} target {}.name is required",
+ target_kind_human,
+ target_kind
+ ),
}
Ok(())
let mut seen = HashSet::new();
for name in targets.iter().map(|e| e.name()) {
if !seen.insert(name.clone()) {
- bail!("found duplicate {target_kind} name {name}, \
- but all {target_kind} targets must have a unique name",
- target_kind = target_kind, name = name);
+ bail!(
+ "found duplicate {target_kind} name {name}, \
+ but all {target_kind} targets must have a unique name",
+ target_kind = target_kind,
+ name = name
+ );
}
}
Ok(())
}
-
fn configure(toml: &TomlTarget, target: &mut Target) {
let t2 = target.clone();
- target.set_tested(toml.test.unwrap_or_else(|| t2.tested()))
+ target
+ .set_tested(toml.test.unwrap_or_else(|| t2.tested()))
.set_doc(toml.doc.unwrap_or_else(|| t2.documented()))
.set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested()))
.set_benched(toml.bench.unwrap_or_else(|| t2.benched()))
});
}
-fn target_path(target: &TomlTarget,
- inferred: &[(String, PathBuf)],
- target_kind: &str,
- package_root: &Path,
- legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>) -> Result<PathBuf, String> {
+fn target_path(
+ target: &TomlTarget,
+ inferred: &[(String, PathBuf)],
+ target_kind: &str,
+ package_root: &Path,
+ legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>,
+) -> Result<PathBuf, String> {
if let Some(ref path) = target.path {
// Should we verify that this path exists here?
return Ok(package_root.join(&path.0));
}
let name = target.name();
- let mut matching = inferred.iter()
+ let mut matching = inferred
+ .iter()
.filter(|&&(ref n, _)| n == &name)
.map(|&(_, ref p)| p.clone());
if let Some(path) = legacy_path(target) {
return Ok(path);
}
- Err(format!("can't find `{name}` {target_kind}, specify {target_kind}.path",
- name = name, target_kind = target_kind))
+ Err(format!(
+ "can't find `{name}` {target_kind}, specify {target_kind}.path",
+ name = name,
+ target_kind = target_kind
+ ))
}
- (None, Some(_)) => unreachable!()
+ (None, Some(_)) => unreachable!(),
}
}
use git2;
-use util::{CargoResult, process};
+use util::{process, CargoResult};
pub struct HgRepo;
pub struct GitRepo;
git2::Repository::init(path)?;
Ok(GitRepo)
}
- pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository,git2::Error> {
+ pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> {
git2::Repository::discover(path)
}
}
Ok(HgRepo)
}
pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
- process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()?;
+ process("hg")
+ .cwd(cwd)
+ .arg("root")
+ .cwd(path)
+ .exec_with_output()?;
Ok(HgRepo)
}
}
process("fossil").cwd(cwd).arg("init").arg(&db_path).exec()?;
// open it in that new directory
- process("fossil").cwd(&path).arg("open").arg(db_fname).exec()?;
+ process("fossil")
+ .cwd(&path)
+ .arg("open")
+ .arg(db_fname)
+ .exec()?;
// set `target` as ignoreable and cleanable
- process("fossil").cwd(cwd).arg("settings")
+ process("fossil")
+ .cwd(cwd)
+ .arg("settings")
.arg("ignore-glob")
.arg("target");
- process("fossil").cwd(cwd).arg("settings")
+ process("fossil")
+ .cwd(cwd)
+ .arg("settings")
.arg("clean-glob")
.arg("target");
Ok(FossilRepo)
#![allow(unknown_lints)]
extern crate curl;
-extern crate url;
#[macro_use]
extern crate failure;
-extern crate serde_json;
#[macro_use]
extern crate serde_derive;
+extern crate serde_json;
+extern crate url;
use std::collections::BTreeMap;
use std::fs::File;
pub license_file: Option<String>,
pub repository: Option<String>,
pub badges: BTreeMap<String, BTreeMap<String, String>>,
- #[serde(default)]
- pub links: Option<String>,
+ #[serde(default)] pub links: Option<String>,
}
#[derive(Serialize)]
pub version_req: String,
pub target: Option<String>,
pub kind: String,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub registry: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")] pub registry: Option<String>,
}
#[derive(Deserialize)]
pub invalid_badges: Vec<String>,
}
-#[derive(Deserialize)] struct R { ok: bool }
-#[derive(Deserialize)] struct OwnerResponse { ok: bool, msg: String }
-#[derive(Deserialize)] struct ApiErrorList { errors: Vec<ApiError> }
-#[derive(Deserialize)] struct ApiError { detail: String }
-#[derive(Serialize)] struct OwnersReq<'a> { users: &'a [&'a str] }
-#[derive(Deserialize)] struct Users { users: Vec<User> }
-#[derive(Deserialize)] struct TotalCrates { total: u32 }
-#[derive(Deserialize)] struct Crates { crates: Vec<Crate>, meta: TotalCrates }
+#[derive(Deserialize)]
+struct R {
+ ok: bool,
+}
+#[derive(Deserialize)]
+struct OwnerResponse {
+ ok: bool,
+ msg: String,
+}
+#[derive(Deserialize)]
+struct ApiErrorList {
+ errors: Vec<ApiError>,
+}
+#[derive(Deserialize)]
+struct ApiError {
+ detail: String,
+}
+#[derive(Serialize)]
+struct OwnersReq<'a> {
+ users: &'a [&'a str],
+}
+#[derive(Deserialize)]
+struct Users {
+ users: Vec<User>,
+}
+#[derive(Deserialize)]
+struct TotalCrates {
+ total: u32,
+}
+#[derive(Deserialize)]
+struct Crates {
+ crates: Vec<Crate>,
+ meta: TotalCrates,
+}
impl Registry {
pub fn new(host: String, token: Option<String>) -> Registry {
Registry::new_handle(host, token, Easy::new())
}
- pub fn new_handle(host: String,
- token: Option<String>,
- handle: Easy) -> Registry {
+ pub fn new_handle(host: String, token: Option<String>, handle: Easy) -> Registry {
Registry {
host,
token,
pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<String> {
let body = serde_json::to_string(&OwnersReq { users: owners })?;
- let body = self.put(format!("/crates/{}/owners", krate),
- body.as_bytes())?;
+ let body = self.put(format!("/crates/{}/owners", krate), body.as_bytes())?;
assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
Ok(serde_json::from_str::<OwnerResponse>(&body)?.msg)
}
pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
let body = serde_json::to_string(&OwnersReq { users: owners })?;
- let body = self.delete(format!("/crates/{}/owners", krate),
- Some(body.as_bytes()))?;
+ let body = self.delete(format!("/crates/{}/owners", krate), Some(body.as_bytes()))?;
assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
Ok(())
}
Ok(serde_json::from_str::<Users>(&body)?.users)
}
- pub fn publish(&mut self, krate: &NewCrate, tarball: &File)
- -> Result<Warnings> {
+ pub fn publish(&mut self, krate: &NewCrate, tarball: &File) -> Result<Warnings> {
let json = serde_json::to_string(krate)?;
// Prepare the body. The format of the upload request is:
//
let stat = tarball.metadata()?;
let header = {
let mut w = Vec::new();
- w.extend([
- (json.len() >> 0) as u8,
- (json.len() >> 8) as u8,
- (json.len() >> 16) as u8,
- (json.len() >> 24) as u8,
- ].iter().map(|x| *x));
+ w.extend(
+ [
+ (json.len() >> 0) as u8,
+ (json.len() >> 8) as u8,
+ (json.len() >> 16) as u8,
+ (json.len() >> 24) as u8,
+ ].iter()
+ .map(|x| *x),
+ );
w.extend(json.as_bytes().iter().map(|x| *x));
- w.extend([
- (stat.len() >> 0) as u8,
- (stat.len() >> 8) as u8,
- (stat.len() >> 16) as u8,
- (stat.len() >> 24) as u8,
- ].iter().map(|x| *x));
+ w.extend(
+ [
+ (stat.len() >> 0) as u8,
+ (stat.len() >> 8) as u8,
+ (stat.len() >> 16) as u8,
+ (stat.len() >> 24) as u8,
+ ].iter()
+ .map(|x| *x),
+ );
w
};
let size = stat.len() as usize + header.len();
let formatted_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET);
let body = self.req(
format!("/crates?q={}&per_page={}", formatted_query, limit),
- None, Auth::Unauthorized
+ None,
+ Auth::Unauthorized,
)?;
let crates = serde_json::from_str::<Crates>(&body)?;
}
pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
- let body = self.delete(format!("/crates/{}/{}/yank", krate, version),
- None)?;
+ let body = self.delete(format!("/crates/{}/{}/yank", krate, version), None)?;
assert!(serde_json::from_str::<R>(&body)?.ok);
Ok(())
}
pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
- let body = self.put(format!("/crates/{}/{}/unyank", krate, version),
- &[])?;
+ let body = self.put(format!("/crates/{}/{}/unyank", krate, version), &[])?;
assert!(serde_json::from_str::<R>(&body)?.ok);
Ok(())
}
self.req(path, b, Auth::Authorized)
}
- fn req(&mut self,
- path: String,
- body: Option<&[u8]>,
- authorized: Auth) -> Result<String> {
+ fn req(&mut self, path: String, body: Option<&[u8]>, authorized: Auth) -> Result<String> {
self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
let mut headers = List::new();
headers.append("Accept: application/json")?;
}
}
-fn handle(handle: &mut Easy,
- read: &mut FnMut(&mut [u8]) -> usize) -> Result<String> {
+fn handle(handle: &mut Easy, read: &mut FnMut(&mut [u8]) -> usize) -> Result<String> {
let mut headers = Vec::new();
let mut body = Vec::new();
{
200 => {}
403 => bail!("received 403 unauthorized response code"),
404 => bail!("received 404 not found response code"),
- code => {
- bail!("failed to get a 200 OK response, got {}\n\
- headers:\n\
- \t{}\n\
- body:\n\
- {}",
- code,
- headers.join("\n\t"),
- String::from_utf8_lossy(&body))
- }
+ code => bail!(
+ "failed to get a 200 OK response, got {}\n\
+ headers:\n\
+ \t{}\n\
+ body:\n\
+ {}",
+ code,
+ headers.join("\n\t"),
+ String::from_utf8_lossy(&body)
+ ),
}
let body = match String::from_utf8(body) {
use cargotest::ChannelChanger;
-use cargotest::support::registry::{self, Package, alt_api_path};
-use cargotest::support::{paths, project, execs};
+use cargotest::support::registry::{self, alt_api_path, Package};
+use cargotest::support::{execs, paths, project};
use hamcrest::assert_that;
use std::fs::File;
use std::io::Write;
#[test]
fn is_feature_gated() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").alternative(true).publish();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(101)
- .with_stderr_contains(" feature `alternative-registries` is required"));
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(" feature `alternative-registries` is required"),
+ );
}
#[test]
fn depend_on_alt_registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").alternative(true).publish();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- reg = registry::alt_registry())));
+ dir = p.url(),
+ reg = registry::alt_registry()
+ )),
+ );
- assert_that(p.cargo("clean").masquerade_as_nightly_cargo(), execs().with_status(0));
+ assert_that(
+ p.cargo("clean").masquerade_as_nightly_cargo(),
+ execs().with_status(0),
+ );
// Don't download a second time
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn depend_on_alt_registry_depends_on_same_registry_no_index() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").alternative(true).publish();
- Package::new("bar", "0.0.1").dep("baz", "0.0.1").alternative(true).publish();
-
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stderr(&format!("\
+ Package::new("bar", "0.0.1")
+ .dep("baz", "0.0.1")
+ .alternative(true)
+ .publish();
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- reg = registry::alt_registry())));
+ dir = p.url(),
+ reg = registry::alt_registry()
+ )),
+ );
}
#[test]
fn depend_on_alt_registry_depends_on_same_registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").alternative(true).publish();
- Package::new("bar", "0.0.1").registry_dep("baz", "0.0.1", registry::alt_registry().as_str()).alternative(true).publish();
-
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stderr(&format!("\
+ Package::new("bar", "0.0.1")
+ .registry_dep("baz", "0.0.1", registry::alt_registry().as_str())
+ .alternative(true)
+ .publish();
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- reg = registry::alt_registry())));
+ dir = p.url(),
+ reg = registry::alt_registry()
+ )),
+ );
}
#[test]
fn depend_on_alt_registry_depends_on_crates_io() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").publish();
- Package::new("bar", "0.0.1").registry_dep("baz", "0.0.1", registry::registry().as_str()).alternative(true).publish();
-
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stderr(&format!("\
+ Package::new("bar", "0.0.1")
+ .registry_dep("baz", "0.0.1", registry::registry().as_str())
+ .alternative(true)
+ .publish();
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{alt_reg}`
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- alt_reg = registry::alt_registry(),
- reg = registry::registry())));
+ dir = p.url(),
+ alt_reg = registry::alt_registry(),
+ reg = registry::registry()
+ )),
+ );
}
#[test]
registry::init();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
path = "bar"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
registry::init();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
git = ""
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
#[test]
fn cannot_publish_to_crates_io_with_registry_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
name = "foo"
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").alternative(true).publish();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--index").arg(registry::registry().to_string()),
- execs().with_status(101));
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--index")
+ .arg(registry::registry().to_string()),
+ execs().with_status(101),
+ );
}
#[test]
fn publish_with_registry_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
version = "0.0.1"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").alternative(true).publish();
// Login so that we have the token available
- assert_that(p.cargo("login").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("TOKEN").arg("-Zunstable-options"),
- execs().with_status(0));
-
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("login")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("TOKEN")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
}
#[test]
fn alt_registry_and_crates_io_deps() {
-
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.alt_reg_dep]
version = "0.1.0"
registry = "alternative"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("crates_io_dep", "0.0.1").publish();
- Package::new("alt_reg_dep", "0.1.0").alternative(true).publish();
-
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0)
- .with_stderr_contains(format!("\
-[UPDATING] registry `{}`", registry::alt_registry()))
- .with_stderr_contains(&format!("\
-[UPDATING] registry `{}`", registry::registry()))
- .with_stderr_contains("\
-[DOWNLOADING] crates_io_dep v0.0.1 (registry `file://[..]`)")
- .with_stderr_contains("\
-[DOWNLOADING] alt_reg_dep v0.1.0 (registry `file://[..]`)")
- .with_stderr_contains("\
-[COMPILING] alt_reg_dep v0.1.0 (registry `file://[..]`)")
- .with_stderr_contains("\
-[COMPILING] crates_io_dep v0.0.1")
- .with_stderr_contains(&format!("\
-[COMPILING] foo v0.0.1 ({})", p.url()))
- .with_stderr_contains("\
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs"))
-
+ Package::new("alt_reg_dep", "0.1.0")
+ .alternative(true)
+ .publish();
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(format!(
+ "\
+ [UPDATING] registry `{}`",
+ registry::alt_registry()
+ ))
+ .with_stderr_contains(&format!(
+ "\
+ [UPDATING] registry `{}`",
+ registry::registry()
+ ))
+ .with_stderr_contains(
+ "\
+ [DOWNLOADING] crates_io_dep v0.0.1 (registry `file://[..]`)",
+ )
+ .with_stderr_contains(
+ "\
+ [DOWNLOADING] alt_reg_dep v0.1.0 (registry `file://[..]`)",
+ )
+ .with_stderr_contains(
+ "\
+ [COMPILING] alt_reg_dep v0.1.0 (registry `file://[..]`)",
+ )
+ .with_stderr_contains(
+ "\
+ [COMPILING] crates_io_dep v0.0.1",
+ )
+ .with_stderr_contains(&format!(
+ "\
+ [COMPILING] foo v0.0.1 ({})",
+ p.url()
+ ))
+ .with_stderr_contains(
+ "\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs",
+ ),
+ )
}
#[test]
fn block_publish_due_to_no_token() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").alternative(true).publish();
// Now perform the actual publish
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(101)
- .with_stderr_contains("error: no upload token found, please run `cargo login`"));
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("error: no upload token found, please run `cargo login`"),
+ );
}
#[test]
fn publish_to_alt_registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").alternative(true).publish();
// Login so that we have the token available
- assert_that(p.cargo("login").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("TOKEN").arg("-Zunstable-options"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("login")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("TOKEN")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
// Now perform the actual publish
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
// Ensure that the crate is uploaded
assert!(alt_api_path().join("api/v1/crates/new").exists());
#[test]
fn publish_with_crates_io_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
[dependencies.bar]
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").publish();
// Login so that we have the token available
- assert_that(p.cargo("login").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("TOKEN").arg("-Zunstable-options"),
- execs().with_status(0));
-
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("login")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("TOKEN")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
}
#[test]
File::create(config)
.unwrap()
- .write_all(br#"
+ .write_all(
+ br#"
[registries.alternative]
index = "ssh://git:secret@foobar.com"
- "#)
+ "#,
+ )
.unwrap();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(101)
- .with_stderr_contains("error: Registry URLs may not contain credentials"));
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("error: Registry URLs may not contain credentials"),
+ );
}
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use cargotest::support::registry::Package;
use hamcrest::assert_that;
#[test]
fn bad1() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[target]
nonexistent-target = "foo"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v")
- .arg("--target=nonexistent-target"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .arg("--target=nonexistent-target"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] expected table for configuration key `target.nonexistent-target`, \
but found string in [..]config
-"));
+",
+ ),
+ );
}
#[test]
fn bad2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[http]
proxy = 3.0
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("publish").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Couldn't load Cargo configuration
Caused by:
Caused by:
found TOML configuration value of unknown type `float`
-"));
+",
+ ),
+ );
}
#[test]
fn bad3() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[http]
proxy = true
- "#)
+ "#,
+ )
.build();
Package::new("foo", "1.0.0").publish();
- assert_that(p.cargo("publish").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to update registry [..]
Caused by:
invalid configuration for key `http.proxy`
expected a string, but found a boolean for `http.proxy` in [..]config
-"));
+",
+ ),
+ );
}
#[test]
fn bad4() {
let p = project("foo")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[cargo-new]
name = false
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("new").arg("-v").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("new").arg("-v").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Failed to create project `foo` at `[..]`
Caused by:
invalid configuration for key `cargo-new.name`
expected a string, but found a boolean for `cargo-new.name` in [..]config
-"));
+",
+ ),
+ );
}
#[test]
fn bad5() {
let p = project("foo")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
foo = ""
- "#)
- .file("foo/.cargo/config", r#"
+ "#,
+ )
+ .file(
+ "foo/.cargo/config",
+ r#"
foo = 2
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("new")
- .arg("-v").arg("foo").cwd(&p.root().join("foo")),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("new")
+ .arg("-v")
+ .arg("foo")
+ .cwd(&p.root().join("foo")),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Failed to create project `foo` at `[..]`
Caused by:
Caused by:
expected integer, but found string
-"));
+",
+ ),
+ );
}
#[test]
fn bad_cargo_config_jobs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
jobs = -1
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] build.jobs must be positive, but found -1 in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn default_cargo_config_jobs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
jobs = 1
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn good_cargo_config_jobs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
jobs = 4
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn invalid_global_config() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file(".cargo/config", "4")
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Couldn't load Cargo configuration
Caused by:
Caused by:
expected an equals, found eof at line 1
-"));
+",
+ ),
+ );
}
#[test]
fn bad_cargo_lock() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("Cargo.lock", "[[package]]\nfoo = 92")
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse lock file at: [..]Cargo.lock
Caused by:
missing field `name` for key `package`
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
name = "foo"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--verbose"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--verbose"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse lock file at: [..]
Caused by:
package `foo` is specified twice in the lockfile
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
name = "foo"
version = "0.1.0"
source = "You shall not parse"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--verbose"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--verbose"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse lock file at: [..]
Caused by:
invalid source `You shall not parse` for key `package.source`
-"));
+",
+ ),
+ );
}
#[test]
fn bad_dependency_in_lockfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "foo"
version = "0.0.1"
dependencies = [
"bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--verbose"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--verbose"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse lock file at: [..]
Caused by:
package `bar 0.1.0 ([..])` is specified as a dependency, but is missing from the package list
-"));
-
+",
+ ),
+ );
}
#[test]
fn bad_git_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
foo = { git = "file:.." }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] git repository `file:///`
[ERROR] failed to load source for a dependency on `foo`
Caused by:
[..]'file:///' is not a valid local file URI[..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_crate_type() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[lib]
crate-type = ["bad_type", "rlib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: failed to run `rustc` to learn about target-specific information
-"));
+",
+ ),
+ );
}
#[test]
fn malformed_override() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
native = {
foo: "bar"
}
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Caused by:
expected a table key, found a newline at line 8
-"));
+",
+ ),
+ );
}
#[test]
fn duplicate_binary_names() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "qqq"
version = "0.1.0"
[[bin]]
name = "e"
path = "b.rs"
- "#)
+ "#,
+ )
.file("a.rs", r#"fn main() -> () {}"#)
.file("b.rs", r#"fn main() -> () {}"#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
found duplicate binary name e, but all binary targets must have a unique name
-"));
+",
+ ),
+ );
}
#[test]
fn duplicate_example_names() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "qqq"
version = "0.1.0"
[[example]]
name = "ex"
path = "examples/ex2.rs"
- "#)
+ "#,
+ )
.file("examples/ex.rs", r#"fn main () -> () {}"#)
.file("examples/ex2.rs", r#"fn main () -> () {}"#)
.build();
- assert_that(p.cargo("build").arg("--example").arg("ex"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--example").arg("ex"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
found duplicate example name ex, but all example targets must have a unique name
-"));
+",
+ ),
+ );
}
#[test]
fn duplicate_bench_names() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "qqq"
version = "0.1.0"
[[bench]]
name = "ex"
path = "benches/ex2.rs"
- "#)
+ "#,
+ )
.file("benches/ex.rs", r#"fn main () {}"#)
.file("benches/ex2.rs", r#"fn main () {}"#)
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("bench"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
found duplicate bench name ex, but all bench targets must have a unique name
-"));
+",
+ ),
+ );
}
#[test]
fn duplicate_deps() {
let p = project("foo")
- .file("shim-bar/Cargo.toml", r#"
+ .file(
+ "shim-bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("shim-bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "shim-bar/src/lib.rs",
+ r#"
pub fn a() {}
- "#)
- .file("linux-bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "linux-bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("linux-bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "linux-bar/src/lib.rs",
+ r#"
pub fn a() {}
- "#)
- .file("Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "qqq"
version = "0.0.1"
[target.x86_64-unknown-linux-gnu.dependencies]
bar = { path = "linux-bar" }
- "#)
+ "#,
+ )
.file("src/main.rs", r#"fn main () {}"#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Dependency 'bar' has different source paths depending on the build target. Each dependency must \
have a single canonical source path irrespective of build target.
-"));
+",
+ ),
+ );
}
#[test]
fn duplicate_deps_diff_sources() {
let p = project("foo")
- .file("shim-bar/Cargo.toml", r#"
+ .file(
+ "shim-bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("shim-bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "shim-bar/src/lib.rs",
+ r#"
pub fn a() {}
- "#)
- .file("linux-bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "linux-bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("linux-bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "linux-bar/src/lib.rs",
+ r#"
pub fn a() {}
- "#)
- .file("Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "qqq"
version = "0.0.1"
[target.x86_64-unknown-linux-gnu.dependencies]
bar = { path = "linux-bar" }
- "#)
+ "#,
+ )
.file("src/main.rs", r#"fn main () {}"#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Dependency 'bar' has different source paths depending on the build target. Each dependency must \
have a single canonical source path irrespective of build target.
-"));
+",
+ ),
+ );
}
#[test]
fn unused_keys() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[target.foo]
bar = "3"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: unused manifest key: target.foo.bar
[COMPILING] foo v0.1.0 (file:///[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
bulid = "foo"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: unused manifest key: project.bulid
[COMPILING] foo [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[lib]
build = "foo"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: unused manifest key: lib.build
[COMPILING] foo [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
-
#[test]
fn empty_dependencies() {
let p = project("empty_deps")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "empty_deps"
version = "0.0.0"
[dependencies]
foo = {}
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("foo", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr_contains(
+ "\
warning: dependency (foo) specified without providing a local path, Git repository, or version \
to use. This will be considered an error in future versions
-"));
+",
+ ),
+ );
}
#[test]
fn invalid_toml_historically_allowed_is_warned() {
let p = project("empty_deps")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "empty_deps"
version = "0.0.0"
authors = []
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[foo] bar = 2
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: TOML file found which contains invalid syntax and will soon not parse
at `[..]config`.
in the future.
[COMPILING] empty_deps v0.0.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn ambiguous_git_reference() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
git = "https://127.0.0.1"
branch = "master"
tag = "some-tag"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_stderr_contains(
+ "\
[WARNING] dependency (bar) specification is ambiguous. \
Only one of `branch`, `tag` or `rev` is allowed. \
This will be considered an error in future versions
-"));
+",
+ ),
+ );
}
#[test]
fn bad_source_config1() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.foo]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: no source URL specified for `source.foo`, need [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_source_config2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.crates-io]
registry = 'http://example.com'
replace-with = 'bar'
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to load source for a dependency on `bar`
Caused by:
Caused by:
could not find a configured source with the name `bar` \
when attempting to lookup `crates-io` (configuration in [..])
-"));
+",
+ ),
+ );
}
#[test]
fn bad_source_config3() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.crates-io]
registry = 'http://example.com'
replace-with = 'crates-io'
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to load source for a dependency on `bar`
Caused by:
Caused by:
detected a cycle of `replace-with` sources, [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_source_config4() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.crates-io]
registry = 'http://example.com'
replace-with = 'bar'
[source.bar]
registry = 'http://example.com'
replace-with = 'crates-io'
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to load source for a dependency on `bar`
Caused by:
Caused by:
detected a cycle of `replace-with` sources, the source `crates-io` is \
eventually replaced with itself (configuration in [..])
-"));
+",
+ ),
+ );
}
#[test]
fn bad_source_config5() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.crates-io]
registry = 'http://example.com'
replace-with = 'bar'
[source.bar]
registry = 'not a url'
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: configuration key `source.bar.registry` specified an invalid URL (in [..])
Caused by:
invalid url `not a url`: [..]
-"));
+",
+ ),
+ );
}
#[test]
fn both_git_and_path_specified() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies.bar]
git = "https://127.0.0.1"
path = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(foo.cargo("build").arg("-v"),
- execs().with_stderr_contains("\
+ assert_that(
+ foo.cargo("build").arg("-v"),
+ execs().with_stderr_contains(
+ "\
[WARNING] dependency (bar) specification is ambiguous. \
Only one of `git` or `path` is allowed. \
This will be considered an error in future versions
-"));
+",
+ ),
+ );
}
#[test]
fn bad_source_config6() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.crates-io]
registry = 'http://example.com'
replace-with = ['not', 'a', 'string']
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: expected a string, but found a array for `source.crates-io.replace-with` in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn ignored_git_revision() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies.bar]
path = "bar"
branch = "spam"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(foo.cargo("build").arg("-v"),
- execs().with_stderr_contains("\
-[WARNING] key `branch` is ignored for dependency (bar). \
-This will be considered an error in future versions"));
+ assert_that(
+ foo.cargo("build").arg("-v"),
+ execs().with_stderr_contains(
+ "\
+ [WARNING] key `branch` is ignored for dependency (bar). \
+ This will be considered an error in future versions",
+ ),
+ );
}
#[test]
fn bad_source_config7() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.foo]
registry = 'http://example.com'
local-registry = 'file:///another/file'
- "#)
+ "#,
+ )
.build();
Package::new("bar", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: more than one source URL specified for `source.foo`
-"));
+",
+ ),
+ );
}
#[test]
fn bad_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies]
bar = 3
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
invalid type: integer `3`, expected a version string like [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_debuginfo() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[profile.dev]
debug = 'a'
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
invalid type: string \"a\", expected a boolean or an integer for [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_opt_level() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = 3
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
invalid type: integer `3`, expected a boolean or a string for key [..]
-"));
+",
+ ),
+ );
}
-use cargotest::support::{project, execs, main_file, basic_bin_manifest};
-use hamcrest::{assert_that};
+use cargotest::support::{basic_bin_manifest, execs, main_file, project};
+use hamcrest::assert_that;
fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) {
let p = project("foo")
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo(command)
- .arg("--manifest-path").arg(manifest_path_argument)
- .cwd(p.root().parent().unwrap()),
- execs().with_status(101)
- .with_stderr("[ERROR] the manifest-path must be a path \
- to a Cargo.toml file"));
+ assert_that(
+ p.cargo(command)
+ .arg("--manifest-path")
+ .arg(manifest_path_argument)
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(101).with_stderr(
+ "[ERROR] the manifest-path must be a path \
+ to a Cargo.toml file",
+ ),
+ );
}
-
fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) {
let p = project("foo").build();
let expected_path = manifest_path_argument
- .split('/').collect::<Vec<_>>().join("[..]");
+ .split('/')
+ .collect::<Vec<_>>()
+ .join("[..]");
- assert_that(p.cargo(command)
- .arg("--manifest-path").arg(manifest_path_argument)
- .cwd(p.root().parent().unwrap()),
- execs().with_status(101)
- .with_stderr(
- format!("[ERROR] manifest path `{}` does not exist",
- expected_path)
- ));
+ assert_that(
+ p.cargo(command)
+ .arg("--manifest-path")
+ .arg(manifest_path_argument)
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(101).with_stderr(format!(
+ "[ERROR] manifest path `{}` does not exist",
+ expected_path
+ )),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("verify-project")
- .arg("--manifest-path").arg("foo")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(1)
- .with_stdout("\
-{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
- "));
+ assert_that(
+ p.cargo("verify-project")
+ .arg("--manifest-path")
+ .arg("foo")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(1).with_stdout(
+ "\
+ {\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+ ",
+ ),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("verify-project")
- .arg("--manifest-path").arg("foo/bar")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(1)
- .with_stdout("\
-{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
- "));
+ assert_that(
+ p.cargo("verify-project")
+ .arg("--manifest-path")
+ .arg("foo/bar")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(1).with_stdout(
+ "\
+ {\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+ ",
+ ),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("verify-project")
- .arg("--manifest-path").arg("foo/bar/baz")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(1)
- .with_stdout("\
-{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
- "));
+ assert_that(
+ p.cargo("verify-project")
+ .arg("--manifest-path")
+ .arg("foo/bar/baz")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(1).with_stdout(
+ "\
+ {\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+ ",
+ ),
+ );
}
#[test]
fn verify_project_dir_to_nonexistent_cargo_toml() {
let p = project("foo").build();
- assert_that(p.cargo("verify-project")
- .arg("--manifest-path").arg("foo/bar/baz/Cargo.toml")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(1)
- .with_stdout("\
-{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\
- "));
+ assert_that(
+ p.cargo("verify-project")
+ .arg("--manifest-path")
+ .arg("foo/bar/baz/Cargo.toml")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(1).with_stdout(
+ "\
+ {\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\
+ ",
+ ),
+ );
}
use cargo::util::process;
use cargotest::is_nightly;
use cargotest::support::paths::CargoPathExt;
-use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest};
+use cargotest::support::{basic_bin_manifest, basic_lib_manifest, execs, project};
use hamcrest::{assert_that, existing_file};
#[test]
fn cargo_bench_simple() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "hello")
- }"#)
+ }"#,
+ )
.build();
assert_that(p.cargo("build"), execs());
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("hello\n"));
+ assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n"));
- assert_that(p.cargo("bench"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.5.0 ({})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test bench_hello ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test bench_hello ... bench: [..]"),
+ );
}
#[test]
fn bench_bench_implicit() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
- fn main() { println!("Hello main!"); }"#)
- .file("tests/other.rs", r#"
+ fn main() { println!("Hello main!"); }"#,
+ )
+ .file(
+ "tests/other.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run3(_ben: &mut test::Bencher) { }"#)
- .file("benches/mybench.rs", r#"
+ #[bench] fn run3(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ "benches/mybench.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run2(_ben: &mut test::Bencher) { }"#)
+ #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+ )
.build();
- assert_that(p.cargo("bench").arg("--benches"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("bench").arg("--benches"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]mybench-[..][EXE]
-", dir = p.url()))
- .with_stdout_contains("test run2 ... bench: [..]"));
+",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test run2 ... bench: [..]"),
+ );
}
#[test]
fn bench_bin_implicit() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
- fn main() { println!("Hello main!"); }"#)
- .file("tests/other.rs", r#"
+ fn main() { println!("Hello main!"); }"#,
+ )
+ .file(
+ "tests/other.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run3(_ben: &mut test::Bencher) { }"#)
- .file("benches/mybench.rs", r#"
+ #[bench] fn run3(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ "benches/mybench.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run2(_ben: &mut test::Bencher) { }"#)
+ #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+ )
.build();
- assert_that(p.cargo("bench").arg("--bins"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("bench").arg("--bins"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
-", dir = p.url()))
- .with_stdout_contains("test run1 ... bench: [..]"));
+",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test run1 ... bench: [..]"),
+ );
}
#[test]
fn bench_tarname() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("benches/bin1.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/bin1.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
- .file("benches/bin2.rs", r#"
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ "benches/bin2.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run2(_ben: &mut test::Bencher) { }"#)
+ #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+ )
.build();
- assert_that(p.cargo("bench").arg("--bench").arg("bin2"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("bench").arg("--bench").arg("bin2"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]bin2-[..][EXE]
-", dir = p.url()))
- .with_stdout_contains("test run2 ... bench: [..]"));
+",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test run2 ... bench: [..]"),
+ );
}
#[test]
fn bench_multiple_targets() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("benches/bin1.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/bin1.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
- .file("benches/bin2.rs", r#"
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ "benches/bin2.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run2(_ben: &mut test::Bencher) { }"#)
- .file("benches/bin3.rs", r#"
+ #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ "benches/bin3.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run3(_ben: &mut test::Bencher) { }"#)
+ #[bench] fn run3(_ben: &mut test::Bencher) { }"#,
+ )
.build();
- assert_that(p.cargo("bench")
- .arg("--bench").arg("bin1")
- .arg("--bench").arg("bin2"),
- execs()
- .with_status(0)
- .with_stdout_contains("test run1 ... bench: [..]")
- .with_stdout_contains("test run2 ... bench: [..]")
- .with_stdout_does_not_contain("run3"));
+ assert_that(
+ p.cargo("bench")
+ .arg("--bench")
+ .arg("bin1")
+ .arg("--bench")
+ .arg("bin2"),
+ execs()
+ .with_status(0)
+ .with_stdout_contains("test run1 ... bench: [..]")
+ .with_stdout_contains("test run2 ... bench: [..]")
+ .with_stdout_does_not_contain("run3"),
+ );
}
#[test]
fn cargo_bench_verbose() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
#![feature(test)]
#[cfg(test)]
extern crate test;
fn main() {}
#[bench] fn bench_hello(_b: &mut test::Bencher) {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench").arg("-v").arg("hello"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench").arg("-v").arg("hello"),
+ execs()
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc [..] src[/]main.rs [..]`
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] hello --bench`", url = p.url()))
- .with_stdout_contains("test bench_hello ... bench: [..]"));
+[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] hello --bench`",
+ url = p.url()
+ ))
+ .with_stdout_contains("test bench_hello ... bench: [..]"),
+ );
}
#[test]
fn many_similar_names() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#![feature(test)]
#[cfg(test)]
extern crate test;
pub fn foo() {}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
#![feature(test)]
#[cfg(test)]
extern crate foo;
extern crate test;
fn main() {}
#[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() }
- ")
- .file("benches/foo.rs", r#"
+ ",
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() }
- "#)
+ "#,
+ )
.build();
let output = p.cargo("bench").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
- assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output);
- assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output);
- assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output);
+ assert!(
+ output.contains("test bin_bench"),
+ "bin_bench missing\n{}",
+ output
+ );
+ assert!(
+ output.contains("test lib_bench"),
+ "lib_bench missing\n{}",
+ output
+ );
+ assert!(
+ output.contains("test bench_bench"),
+ "bench_bench missing\n{}",
+ output
+ );
}
#[test]
fn cargo_bench_failing_test() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "nope")
- }"#)
+ }"#,
+ )
.build();
assert_that(p.cargo("build"), execs());
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("hello\n"));
+ assert_that(process(&p.bin("foo")), execs().with_stdout("hello\n"));
// Force libtest into serial execution so that the test header will be printed.
- assert_that(p.cargo("bench").arg("--").arg("--test-threads=1"),
- execs().with_stdout_contains("test bench_hello ...[..]")
- .with_stderr_contains(format!("\
+ assert_that(
+ p.cargo("bench").arg("--").arg("--test-threads=1"),
+ execs()
+ .with_stdout_contains("test bench_hello ...[..]")
+ .with_stderr_contains(format!(
+ "\
[COMPILING] foo v0.5.0 ({})[..]
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_either_contains("[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]")
- .with_either_contains("[..]left: `\"hello\"`[..]")
- .with_either_contains("[..]right: `\"nope\"`[..]")
- .with_either_contains("[..]src[/]main.rs:15[..]")
- .with_status(101));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_either_contains(
+ "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]",
+ )
+ .with_either_contains("[..]left: `\"hello\"`[..]")
+ .with_either_contains("[..]right: `\"nope\"`[..]")
+ .with_either_contains("[..]src[/]main.rs:15[..]")
+ .with_status(101),
+ );
}
#[test]
fn bench_with_lib_dep() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "baz"
path = "src/main.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
///
pub fn foo(){}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]release[/]deps[/]baz-[..][EXE]", p.url()))
- .with_stdout_contains("test lib_bench ... bench: [..]")
- .with_stdout_contains("test bin_bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]baz-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test lib_bench ... bench: [..]")
+ .with_stdout_contains("test bin_bench ... bench: [..]"),
+ );
}
#[test]
fn bench_with_deep_lib_dep() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.foo]
path = "../foo"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate foo;
fn bar_bench(_b: &mut test::Bencher) {
foo::foo();
}
- ")
+ ",
+ )
.build();
let _p2 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test bar_bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test bar_bench ... bench: [..]"),
+ );
}
#[test]
fn external_bench_explicit() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bench]]
name = "bench"
path = "src/bench.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
- "#)
- .file("src/bench.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bench.rs",
+ r#"
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", p.url()))
- .with_stdout_contains("test internal_bench ... bench: [..]")
- .with_stdout_contains("test external_bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test internal_bench ... bench: [..]")
+ .with_stdout_contains("test external_bench ... bench: [..]"),
+ );
}
#[test]
fn external_bench_implicit() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
- "#)
- .file("benches/external.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/external.rs",
+ r#"
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]release[/]deps[/]external-[..][EXE]", p.url()))
- .with_stdout_contains("test internal_bench ... bench: [..]")
- .with_stdout_contains("test external_bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]external-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test internal_bench ... bench: [..]")
+ .with_stdout_contains("test external_bench ... bench: [..]"),
+ );
}
#[test]
fn dont_run_examples() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", r"")
- .file("examples/dont-run-me-i-will-fail.rs", r#"
+ .file(
+ "examples/dont-run-me-i-will-fail.rs",
+ r#"
fn main() { panic!("Examples should not be run by 'cargo test'"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0));
+ assert_that(p.cargo("bench"), execs().with_status(0));
}
#[test]
fn pass_through_command_line() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench] fn foo(_b: &mut test::Bencher) {}
#[bench] fn bar(_b: &mut test::Bencher) {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench").arg("bar"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench").arg("bar"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test bar ... bench: [..]"));
-
- assert_that(p.cargo("bench").arg("foo"),
- execs().with_status(0)
- .with_stderr("[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test foo ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test bar ... bench: [..]"),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test foo ... bench: [..]"),
+ );
}
// Regression test for running cargo-bench twice with
// tests in an rlib
#[test]
fn cargo_bench_twice() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("test_twice")
.file("Cargo.toml", &basic_lib_manifest("test_twice"))
- .file("src/test_twice.rs", r#"
+ .file(
+ "src/test_twice.rs",
+ r#"
#![crate_type = "rlib"]
#![feature(test)]
#[cfg(test)]
#[bench]
fn dummy_bench(b: &mut test::Bencher) { }
- "#)
+ "#,
+ )
.build();
p.cargo("build");
for _ in 0..2 {
- assert_that(p.cargo("bench"),
- execs().with_status(0));
+ assert_that(p.cargo("bench"), execs().with_status(0));
}
}
#[test]
fn lib_bin_same_name() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
#![cfg_attr(test, feature(test))]
#[allow(unused_extern_crates)]
extern crate foo;
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains_n("test [..] ... bench: [..]", 2));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains_n("test [..] ... bench: [..]", 2),
+ );
}
#[test]
fn lib_with_standard_name() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
- ")
- .file("benches/bench.rs", "
+ ",
+ )
+ .file(
+ "benches/bench.rs",
+ "
#![feature(test)]
extern crate syntax;
extern crate test;
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]
-[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test foo_bench ... bench: [..]")
- .with_stdout_contains("test bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test foo_bench ... bench: [..]")
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
}
#[test]
fn lib_with_standard_name2() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
name = "syntax"
bench = false
doctest = false
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
#![feature(test)]
#[cfg(test)]
extern crate syntax;
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
}
#[test]
fn bench_dylib() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![cfg_attr(test, feature(test))]
extern crate bar as the_bar;
#[cfg(test)]
#[bench]
fn foo(_b: &mut test::Bencher) {}
- "#)
- .file("benches/bench.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/bench.rs",
+ r#"
#![feature(test)]
extern crate foo as the_foo;
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) { the_foo::bar(); }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
crate_type = ["dylib"]
- "#)
- .file("bar/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ "
pub fn baz() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench").arg("-v"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[RUNNING] [..] -C opt-level=3 [..]
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] [..] -C opt-level=3 [..]
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench`
-[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url()))
- .with_stdout_contains_n("test foo ... bench: [..]", 2));
+[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("test foo ... bench: [..]", 2),
+ );
p.root().move_into_the_past();
- assert_that(p.cargo("bench").arg("-v"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[FRESH] bar v0.0.1 ({dir}/bar)
[FRESH] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench`
-[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url()))
- .with_stdout_contains_n("test foo ... bench: [..]", 2));
+[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("test foo ... bench: [..]", 2),
+ );
}
#[test]
fn bench_twice_with_build_cmd() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
- .file("src/lib.rs", "
+ .file(
+ "src/lib.rs",
+ "
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test foo ... bench: [..]"));
-
- assert_that(p.cargo("bench"),
- execs().with_status(0)
- .with_stderr("[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test foo ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test foo ... bench: [..]"),
+ );
+
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test foo ... bench: [..]"),
+ );
}
#[test]
fn bench_with_examples() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("testbench")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "testbench"
version = "6.6.6"
[[bench]]
name = "testb1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![cfg_attr(test, feature(test))]
#[cfg(test)]
extern crate test;
fn bench_bench1(_b: &mut Bencher) {
f2();
}
- "#)
- .file("benches/testb1.rs", "
+ "#,
+ )
+ .file(
+ "benches/testb1.rs",
+ "
#![feature(test)]
extern crate testbench;
extern crate test;
fn bench_bench2(_b: &mut Bencher) {
testbench::f2();
}
- ")
- .file("examples/teste1.rs", r#"
+ ",
+ )
+ .file(
+ "examples/teste1.rs",
+ r#"
extern crate testbench;
fn main() {
println!("example1");
testbench::f1();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench").arg("-v"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("bench").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] testbench v6.6.6 ({url})
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `{dir}[/]target[/]release[/]deps[/]testbench-[..][EXE] --bench`
[RUNNING] `{dir}[/]target[/]release[/]deps[/]testb1-[..][EXE] --bench`",
- dir = p.root().display(), url = p.url()))
- .with_stdout_contains("test bench_bench1 ... bench: [..]")
- .with_stdout_contains("test bench_bench2 ... bench: [..]"));
+ dir = p.root().display(),
+ url = p.url()
+ ))
+ .with_stdout_contains("test bench_bench1 ... bench: [..]")
+ .with_stdout_contains("test bench_bench2 ... bench: [..]"),
+ );
}
#[test]
fn test_a_bench() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
[[bench]]
name = "b"
test = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("benches/b.rs", r#"
+ .file(
+ "benches/b.rs",
+ r#"
#[test]
fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]")
- .with_stdout_contains("test foo ... ok"));
+[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]",
+ )
+ .with_stdout_contains("test foo ... ok"),
+ );
}
#[test]
fn test_bench_no_run() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("benches/bbaz.rs", r#"
+ .file(
+ "benches/bbaz.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_baz(_: &mut Bencher) {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench").arg("--no-run"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("bench").arg("--no-run"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn test_bench_no_fail_fast() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/foo.rs", r#"
+ .file(
+ "src/foo.rs",
+ r#"
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench]
fn bench_nope(_b: &mut test::Bencher) {
assert_eq!("nope", hello())
- }"#)
+ }"#,
+ )
.build();
- assert_that(p.cargo("bench").arg("--no-fail-fast").arg("--").arg("--test-threads=1"),
- execs().with_status(101)
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("running 2 tests")
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test bench_hello [..]")
- .with_stdout_contains("test bench_nope [..]"));
+ assert_that(
+ p.cargo("bench")
+ .arg("--no-fail-fast")
+ .arg("--")
+ .arg("--test-threads=1"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("running 2 tests")
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_hello [..]")
+ .with_stdout_contains("test bench_nope [..]"),
+ );
}
#[test]
fn test_bench_multiple_packages() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
[dependencies.baz]
path = "../baz"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
authors = []
[[bench]]
name = "bbar"
test = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("benches/bbar.rs", r#"
+ .file(
+ "benches/bbar.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_bar(_b: &mut Bencher) {}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "baz"
authors = []
[[bench]]
name = "bbaz"
test = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("benches/bbaz.rs", r#"
+ .file(
+ "benches/bbaz.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_baz(_b: &mut Bencher) {}
- "#)
+ "#,
+ )
.build();
-
- assert_that(p.cargo("bench").arg("-p").arg("bar").arg("-p").arg("baz"),
- execs().with_status(0)
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]bbaz-[..][EXE]")
- .with_stdout_contains("test bench_baz ... bench: [..]")
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]bbar-[..][EXE]")
- .with_stdout_contains("test bench_bar ... bench: [..]"));
+ assert_that(
+ p.cargo("bench").arg("-p").arg("bar").arg("-p").arg("baz"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]bbaz-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_baz ... bench: [..]")
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]bbar-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_bar ... bench: [..]"),
+ );
}
#[test]
fn bench_all_workspace() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("benches/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
- .file("bar/benches/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/benches/bar.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
- .with_stdout_contains("test bench_bar ... bench: [..]")
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test bench_foo ... bench: [..]"));
+ assert_that(
+ p.cargo("bench").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_foo ... bench: [..]"),
+ );
}
#[test]
fn bench_all_exclude() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#![feature(test)]
#[cfg(test)]
extern crate test;
pub fn bar(b: &mut test::Bencher) {
b.iter(|| {});
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
- "#)
- .file("baz/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/lib.rs",
+ r#"
#[test]
pub fn baz() {
break_the_build();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench")
- .arg("--all")
- .arg("--exclude")
- .arg("baz"),
- execs().with_status(0)
- .with_stdout_contains("\
+ assert_that(
+ p.cargo("bench").arg("--all").arg("--exclude").arg("baz"),
+ execs().with_status(0).with_stdout_contains(
+ "\
running 1 test
-test bar ... bench: [..] ns/iter (+/- [..])"));
+test bar ... bench: [..] ns/iter (+/- [..])",
+ ),
+ );
}
#[test]
fn bench_all_virtual_manifest() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("foo/benches/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
- .file("bar/benches/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/benches/bar.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
- "#)
+ "#,
+ )
.build();
// The order in which foo and bar are built is not guaranteed
- assert_that(p.cargo("bench")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
- .with_stdout_contains("test bench_bar ... bench: [..]")
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test bench_foo ... bench: [..]"));
+ assert_that(
+ p.cargo("bench").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_foo ... bench: [..]"),
+ );
}
// https://github.com/rust-lang/cargo/issues/4287
#[test]
fn legacy_bench_name() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[[bench]]
name = "bench"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("src/bench.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bench.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench"), execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("bench"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[WARNING] path `[..]src[/]bench.rs` was erroneously implicitly accepted for benchmark `bench`,
-please set bench.path in Cargo.toml"));
+please set bench.path in Cargo.toml",
+ ),
+ );
}
#[test]
fn bench_virtual_manifest_all_implied() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("foo/benches/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
- .file("bar/benches/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/benches/bar.rs",
+ r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
- "#)
+ "#,
+ )
.build();
// The order in which foo and bar are built is not guaranteed
- assert_that(p.cargo("bench"),
- execs().with_status(0)
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
- .with_stdout_contains("test bench_bar ... bench: [..]")
- .with_stderr_contains("\
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test bench_foo ... bench: [..]"));
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_bar ... bench: [..]")
+ .with_stderr_contains(
+ "\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test bench_foo ... bench: [..]"),
+ );
}
use cargo::util::paths::dylib_path_envvar;
use cargo::util::{process, ProcessBuilder};
use cargotest::{is_nightly, rustc_host, sleep_ms};
-use cargotest::support::paths::{CargoPathExt,root};
-use cargotest::support::{ProjectBuilder};
-use cargotest::support::{project, execs, main_file, basic_bin_manifest};
+use cargotest::support::paths::{root, CargoPathExt};
+use cargotest::support::ProjectBuilder;
+use cargotest::support::{basic_bin_manifest, execs, main_file, project};
use cargotest::support::registry::Package;
use cargotest::ChannelChanger;
-use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
use tempdir::TempDir;
#[test]
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("i am foo\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("i am foo\n"),
+ );
}
#[test]
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &String::from("refusal"))
.build();
- assert_that(p.cargo("build").arg("--message-format=json"), execs().with_status(101)
- .with_stderr_does_not_contain("--- stderr"));
+ assert_that(
+ p.cargo("build").arg("--message-format=json"),
+ execs()
+ .with_status(101)
+ .with_stderr_does_not_contain("--- stderr"),
+ );
}
/// Check that the `CARGO_INCREMENTAL` environment variable results in
#[test]
fn cargo_compile_incremental() {
if !is_nightly() {
- return
+ return;
}
let p = project("foo")
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
- execs().with_stderr_contains(
- "[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n")
- .with_status(0));
+ execs()
+ .with_stderr_contains(
+ "[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n",
+ )
+ .with_status(0),
+ );
assert_that(
p.cargo("test").arg("-v").env("CARGO_INCREMENTAL", "1"),
- execs().with_stderr_contains(
- "[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n")
- .with_status(0));
+ execs()
+ .with_stderr_contains(
+ "[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n",
+ )
+ .with_status(0),
+ );
}
#[test]
fn incremental_profile() {
if !is_nightly() {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[profile.release]
incremental = true
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
assert_that(
p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"),
- execs().with_stderr_does_not_contain("[..]C incremental=[..]")
- .with_status(0));
+ execs()
+ .with_stderr_does_not_contain("[..]C incremental=[..]")
+ .with_status(0),
+ );
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
- execs().with_stderr_contains("[..]C incremental=[..]")
- .with_status(0));
+ execs()
+ .with_stderr_contains("[..]C incremental=[..]")
+ .with_status(0),
+ );
assert_that(
- p.cargo("build").arg("--release").arg("-v").env_remove("CARGO_INCREMENTAL"),
- execs().with_stderr_contains("[..]C incremental=[..]")
- .with_status(0));
+ p.cargo("build")
+ .arg("--release")
+ .arg("-v")
+ .env_remove("CARGO_INCREMENTAL"),
+ execs()
+ .with_stderr_contains("[..]C incremental=[..]")
+ .with_status(0),
+ );
assert_that(
- p.cargo("build").arg("--release").arg("-v").env("CARGO_INCREMENTAL", "0"),
- execs().with_stderr_does_not_contain("[..]C incremental=[..]")
- .with_status(0));
+ p.cargo("build")
+ .arg("--release")
+ .arg("-v")
+ .env("CARGO_INCREMENTAL", "0"),
+ execs()
+ .with_stderr_does_not_contain("[..]C incremental=[..]")
+ .with_status(0),
+ );
}
#[test]
fn incremental_config() {
if !is_nightly() {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
incremental = false
- "#)
+ "#,
+ )
.build();
assert_that(
p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"),
- execs().with_stderr_does_not_contain("[..]C incremental=[..]")
- .with_status(0));
+ execs()
+ .with_stderr_does_not_contain("[..]C incremental=[..]")
+ .with_status(0),
+ );
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
- execs().with_stderr_contains("[..]C incremental=[..]")
- .with_status(0));
+ execs()
+ .with_stderr_contains("[..]C incremental=[..]")
+ .with_status(0),
+ );
}
#[test]
fn cargo_compile_with_workspace_excluded() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
assert_that(
p.cargo("build").arg("--all").arg("--exclude").arg("foo"),
- execs().with_stderr_does_not_contain("[..]virtual[..]")
+ execs()
+ .with_stderr_does_not_contain("[..]virtual[..]")
.with_stderr_contains("[..]no packages to compile")
- .with_status(101));
+ .with_status(101),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("build")
- .arg("--manifest-path").arg("foo/Cargo.toml")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .arg("--manifest-path")
+ .arg("foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn cargo_compile_with_invalid_manifest() {
- let p = project("foo")
- .file("Cargo.toml", "")
- .build();
+ let p = project("foo").file("Cargo.toml", "").build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
virtual manifests must be configured with [workspace]
-"))
+",
+ ),
+ )
}
#[test]
fn cargo_compile_with_invalid_manifest2() {
let p = project("foo")
- .file("Cargo.toml", r"
+ .file(
+ "Cargo.toml",
+ r"
[project]
foo = bar
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Caused by:
invalid number at line 3
-"))
+",
+ ),
+ )
}
#[test]
fn cargo_compile_with_invalid_manifest3() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/Cargo.toml", "a = bar")
.build();
- assert_that(p.cargo("build").arg("--manifest-path")
- .arg("src/Cargo.toml"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("--manifest-path")
+ .arg("src/Cargo.toml"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Caused by:
invalid number at line 1
-"))
+",
+ ),
+ )
}
#[test]
fn cargo_compile_duplicate_build_targets() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
crate-type = ["dylib"]
[dependencies]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![allow(warnings)]
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: file found to be present in multiple build targets: [..]main.rs
[COMPILING] foo v0.0.1 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn cargo_compile_with_invalid_version() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
version = "1.0"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Expected dot for key `project.version`
-"))
-
+",
+ ),
+ )
}
#[test]
fn cargo_compile_with_invalid_package_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = ""
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
package name cannot be an empty string
-"))
+",
+ ),
+ )
}
#[test]
fn cargo_compile_with_invalid_bin_target_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[[bin]]
name = ""
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
binary target names cannot be empty
-"))
+",
+ ),
+ )
}
#[test]
fn cargo_compile_with_forbidden_bin_target_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[[bin]]
name = "build"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
the binary target name `build` is forbidden
-"))
+",
+ ),
+ )
}
#[test]
fn cargo_compile_with_invalid_lib_target_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[lib]
name = ""
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
library target names cannot be empty
-"))
+",
+ ),
+ )
}
#[test]
let tmpdir = TempDir::new("cargo").unwrap();
let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()).build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory
-"));
+",
+ ),
+ );
}
#[test]
.file("src/foo.rs", "invalid rust code!")
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[ERROR] Could not compile `foo`.
-To learn more, run the command again with --verbose.\n"));
+To learn more, run the command again with --verbose.\n",
+ ),
+ );
assert_that(&p.root().join("Cargo.lock"), existing_file());
}
#[test]
fn cargo_compile_with_invalid_code_in_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
path = "../bar"
[dependencies.baz]
path = "../baz"
- "#)
+ "#,
+ )
.file("src/main.rs", "invalid rust code!")
.build();
let _bar = project("bar")
.file("src/foo.rs", "fn main() {} fn dead() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[..]function is never used: `dead`[..]
-"));
+",
+ ),
+ );
}
#[test]
fn cargo_compile_with_warnings_in_a_dep_package() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/foo.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
pub fn gimme() -> &'static str {
"test passed"
}
fn dead() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[..]function is never used: `dead`[..]
-"));
+",
+ ),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(
- process(&p.bin("foo")),
- execs().with_status(0).with_stdout("test passed\n"));
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("test passed\n"),
+ );
}
#[test]
fn cargo_compile_with_nested_deps_inferred() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/foo.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[dependencies.baz]
path = "../baz"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("baz/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/lib.rs",
+ r#"
pub fn gimme() -> String {
"test passed".to_string()
}
- "#)
+ "#,
+ )
.build();
- p.cargo("build")
- .exec_with_output()
- .unwrap();
+ p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
- process(&p.bin("foo")),
- execs().with_status(0).with_stdout("test passed\n"));
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("test passed\n"),
+ );
}
#[test]
fn cargo_compile_with_nested_deps_correct_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[dependencies.baz]
path = "../baz"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("baz/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/lib.rs",
+ r#"
pub fn gimme() -> String {
"test passed".to_string()
}
- "#)
+ "#,
+ )
.build();
- p.cargo("build")
- .exec_with_output()
- .unwrap();
+ p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
- process(&p.bin("foo")),
- execs().with_status(0).with_stdout("test passed\n"));
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("test passed\n"),
+ );
}
#[test]
fn cargo_compile_with_nested_deps_shorthand() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
[lib]
name = "baz"
- "#)
- .file("baz/src/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/baz.rs",
+ r#"
pub fn gimme() -> String {
"test passed".to_string()
}
- "#)
+ "#,
+ )
.build();
- p.cargo("build")
- .exec_with_output()
- .unwrap();
+ p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
- process(&p.bin("foo")),
- execs().with_status(0).with_stdout("test passed\n"));
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("test passed\n"),
+ );
}
#[test]
fn cargo_compile_with_nested_deps_longhand() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/foo.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
[lib]
name = "baz"
- "#)
- .file("baz/src/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/baz.rs",
+ r#"
pub fn gimme() -> String {
"test passed".to_string()
}
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("build"), execs());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("test passed\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("test passed\n"),
+ );
}
// Check that Cargo gives a sensible error if a dependency can't be found
#[test]
fn cargo_compile_with_dep_name_mismatch() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
[dependencies.notquitebar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"]))
.file("bar/Cargo.toml", &basic_bin_manifest("bar"))
.file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[]))
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr(&format!(
-r#"error: no matching package named `notquitebar` found
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(&format!(
+ r#"error: no matching package named `notquitebar` found
location searched: {proj_dir}/bar
required by package `foo v0.0.1 ({proj_dir})`
-"#, proj_dir = p.url())));
+"#,
+ proj_dir = p.url()
+ )),
+ );
}
#[test]
fn cargo_compile_with_filename() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("src/bin/a.rs", r#"
+ .file(
+ "src/bin/a.rs",
+ r#"
extern crate foo;
fn main() { println!("hello a.rs"); }
- "#)
- .file("examples/a.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
fn main() { println!("example"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--bin").arg("bin.rs"),
- execs().with_status(101).with_stderr("\
-[ERROR] no bin target named `bin.rs`"));
+ assert_that(
+ p.cargo("build").arg("--bin").arg("bin.rs"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] no bin target named `bin.rs`",
+ ),
+ );
- assert_that(p.cargo("build").arg("--bin").arg("a.rs"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--bin").arg("a.rs"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no bin target named `a.rs`
-Did you mean `a`?"));
+Did you mean `a`?",
+ ),
+ );
- assert_that(p.cargo("build").arg("--example").arg("example.rs"),
- execs().with_status(101).with_stderr("\
-[ERROR] no example target named `example.rs`"));
+ assert_that(
+ p.cargo("build").arg("--example").arg("example.rs"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] no example target named `example.rs`",
+ ),
+ );
- assert_that(p.cargo("build").arg("--example").arg("a.rs"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--example").arg("a.rs"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no example target named `a.rs`
-Did you mean `a`?"));
+Did you mean `a`?",
+ ),
+ );
}
#[test]
fn cargo_compile_path_with_offline() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs().with_status(0),
+ );
}
#[test]
fn cargo_compile_with_downloaded_dependency_with_offline() {
Package::new("present_dep", "1.2.3")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "present_dep"
version = "1.2.3"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.publish();
{
// make package downloaded
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
let p2 = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p2.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p2.cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] present_dep v1.2.3
[COMPILING] bar v0.1.0 ([..])
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")));
-
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
+ )),
+ );
}
#[test]
fn cargo_compile_offline_not_try_update() {
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
not_cached_dep = "1.2.5"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs().with_status(101).with_stderr(
+ "\
error: no matching package named `not_cached_dep` found
location searched: registry `[..]`
required by package `bar v0.1.0 ([..])`
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
-without the offline flag."));
+without the offline flag.",
+ ),
+ );
}
#[test]
-fn compile_offline_without_maxvers_cached(){
+fn compile_offline_without_maxvers_cached() {
Package::new("present_dep", "1.2.1").publish();
Package::new("present_dep", "1.2.2").publish();
Package::new("present_dep", "1.2.3")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "present_dep"
version = "1.2.3"
- "#)
- .file("src/lib.rs", r#"pub fn get_version()->&'static str {"1.2.3"}"#)
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"pub fn get_version()->&'static str {"1.2.3"}"#,
+ )
.publish();
Package::new("present_dep", "1.2.5")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "present_dep"
version = "1.2.5"
- "#)
+ "#,
+ )
.file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#)
.publish();
{
// make package cached
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "=1.2.3"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
let p2 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2"
- "#)
- .file("src/main.rs", "\
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "\
extern crate present_dep;
fn main(){
println!(\"{}\", present_dep::get_version());
-}")
+}",
+ )
.build();
- assert_that(p2.cargo("run").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p2.cargo("run")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] present_dep v1.2.3
[COMPILING] foo v0.1.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
- Running `[..]`", url = p2.url()))
- .with_stdout("1.2.3")
+ Running `[..]`",
+ url = p2.url()
+ ))
+ .with_stdout("1.2.3"),
);
}
Package::new("baz", "0.1.0").dep("bad", ">=1.0.1").publish();
let p = project("transitive_load_test")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "incompatible_dependencies"
version = "0.0.1"
foo = "0.1.0"
bar = "0.1.0"
baz = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main(){}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: failed to select a version for `bad`.
... required by package `baz v0.1.0`
... which is depended on by `incompatible_dependencies v0.0.1 ([..])`
... which is depended on by `bar v0.1.0`
... which is depended on by `incompatible_dependencies v0.0.1 ([..])`
-failed to select a version for `bad` which could resolve this conflict"));
+failed to select a version for `bad` which could resolve this conflict",
+ ),
+ );
}
#[test]
Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish();
let p = project("transitive_load_test")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "incompatible_dependencies"
version = "0.0.1"
bar = "0.1.0"
baz = "0.1.0"
bad = ">=1.0.1, <=2.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main(){}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: failed to select a version for `bad`.
... required by package `incompatible_dependencies v0.0.1 ([..])`
versions that meet the requirements `>= 1.0.1, <= 2.0.0` are: 2.0.0, 1.0.1
... which is depended on by `bar v0.1.0`
... which is depended on by `incompatible_dependencies v0.0.1 ([..])`
-failed to select a version for `bad` which could resolve this conflict"));
+failed to select a version for `bad` which could resolve this conflict",
+ ),
+ );
}
#[test]
let mut file = File::open(bar_path.clone()).ok().unwrap();
let _ok = file.read_to_end(&mut content).ok().unwrap();
drop(file);
- drop(File::create(bar_path.clone()).ok().unwrap() );
+ drop(File::create(bar_path.clone()).ok().unwrap());
Package::new("foo", "0.1.0").dep("bar", "1.0.0").publish();
let p = project("transitive_load_test")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "transitive_load_test"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main(){}")
.build();
// simulate download foo, but fail to download bar
let _out = p.cargo("build").exec_with_output();
- drop( File::create(bar_path).ok().unwrap().write_all(&content) );
+ drop(File::create(bar_path).ok().unwrap().write_all(&content));
- assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs().with_status(101).with_stderr(
+ "\
error: no matching package named `bar` found
location searched: registry `[..]`
required by package `foo v0.1.0`
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
-without the offline flag."));
+without the offline flag.",
+ ),
+ );
}
#[test]
fn compile_path_dep_then_change_version() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#"
+ File::create(&p.root().join("bar/Cargo.toml"))
+ .unwrap()
+ .write_all(
+ br#"
[package]
name = "bar"
version = "0.0.2"
authors = []
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: no matching version `= 0.0.1` found for package `bar`
location searched: [..]
versions found: 0.0.2
required by package `foo v0.0.1 ([..]/foo)`
consider running `cargo update` to update a path dependency's locked version
-"));
+",
+ ),
+ );
}
#[test]
fn ignores_carriage_return_in_lockfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
mod a; fn main() {}
- "#)
+ "#,
+ )
.file("src/a.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
let lockfile = p.root().join("Cargo.lock");
let mut lock = String::new();
- File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap();
+ File::open(&lockfile)
+ .unwrap()
+ .read_to_string(&mut lock)
+ .unwrap();
let lock = lock.replace("\n", "\r\n");
- File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ File::create(&lockfile)
+ .unwrap()
+ .write_all(lock.as_bytes())
+ .unwrap();
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
// Ensure that path dep + dylib + env_var get metadata
// (even though path_dep + dylib should not)
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "// hi")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
crate_type = ["dylib"]
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "// hello")
.build();
// No metadata on libbar since it's a dylib path dependency
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]debug[/]deps \
--extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
-dir = p.root().display(),
-url = p.url(),
-prefix = env::consts::DLL_PREFIX,
-suffix = env::consts::DLL_SUFFIX,
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX,
+ )),
+ );
assert_that(p.cargo("clean"), execs().with_status(0));
// If you set the env-var, then we expect metadata on libbar
- assert_that(p.cargo("build").arg("-v").env("__CARGO_DEFAULT_LIB_METADATA", "stable"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .env("__CARGO_DEFAULT_LIB_METADATA", "stable"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \
--emit=dep-info,link \
--extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar-[..]{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url(),
-prefix = env::consts::DLL_PREFIX,
-suffix = env::consts::DLL_SUFFIX,
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX,
+ )),
+ );
}
#[test]
fn crate_env_vars() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
description = "This is foo"
homepage = "http://example.com"
authors = ["wycats@example.com"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
VERSION_MINOR, VERSION_PATCH, VERSION_PRE);
assert_eq!(s, VERSION);
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn version() -> String {
format!("{}-{}-{} @ {} in {}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_PRE"),
env!("CARGO_MANIFEST_DIR"))
}
- "#)
+ "#,
+ )
.build();
println!("build");
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
println!("bin");
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout(&format!("0-5-1 @ alpha.1 in {}\n",
- p.root().display())));
+ assert_that(
+ process(&p.bin("foo")),
+ execs()
+ .with_status(0)
+ .with_stdout(&format!("0-5-1 @ alpha.1 in {}\n", p.root().display())),
+ );
println!("test");
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
#[test]
fn crate_authors_env_vars() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
authors = ["wycats@example.com", "neikos@example.com"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS");
println!("{}", AUTHORS);
assert_eq!(s, AUTHORS);
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn authors() -> String {
format!("{}", env!("CARGO_PKG_AUTHORS"))
}
- "#)
+ "#,
+ )
.build();
println!("build");
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
println!("bin");
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("wycats@example.com:neikos@example.com"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs()
+ .with_status(0)
+ .with_stdout("wycats@example.com:neikos@example.com"),
+ );
println!("test");
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error
fn setenv_for_removing_empty_component(mut p: ProcessBuilder) -> ProcessBuilder {
let v = dylib_path_envvar();
if let Ok(search_path) = env::var(v) {
- let new_search_path =
- env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()))
- .expect("join_paths");
+ let new_search_path = env::join_paths(
+ env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()),
+ ).expect("join_paths");
p.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly
}
p
#[test]
fn crate_library_path_env_var() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", &format!(r##"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r##"
fn main() {{
let search_path = env!("{}");
let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
assert!(!paths.contains(&"".into()));
}}
- "##, dylib_path_envvar()))
+ "##,
+ dylib_path_envvar()
+ ),
+ )
.build();
- assert_that(setenv_for_removing_empty_component(p.cargo("run")),
- execs().with_status(0));
+ assert_that(
+ setenv_for_removing_empty_component(p.cargo("run")),
+ execs().with_status(0),
+ );
}
// Regression test for #4277
#[test]
fn build_with_fake_libc_not_loading() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.file("libc.so.6", r#""#)
.build();
- assert_that(setenv_for_removing_empty_component(p.cargo("build")),
- execs().with_status(0));
+ assert_that(
+ setenv_for_removing_empty_component(p.cargo("build")),
+ execs().with_status(0),
+ );
}
// this is testing that src/<pkg-name>.rs still works (for now)
#[test]
fn many_crate_types_old_style_lib_location() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
name = "foo"
crate_type = ["rlib", "dylib"]
- "#)
- .file("src/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "src/foo.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"), execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[WARNING] path `[..]src[/]foo.rs` was erroneously implicitly accepted for library `foo`,
-please rename the file to `src/lib.rs` or set lib.path in Cargo.toml"));
+please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+ ),
+ );
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
- let fname = format!("{}foo{}", env::consts::DLL_PREFIX,
- env::consts::DLL_SUFFIX);
+ let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(&fname), existing_file());
}
#[test]
fn many_crate_types_correct() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
name = "foo"
crate_type = ["rlib", "dylib"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
- let fname = format!("{}foo{}", env::consts::DLL_PREFIX,
- env::consts::DLL_SUFFIX);
+ let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(&fname), existing_file());
}
#[test]
fn self_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
[lib]
name = "test"
path = "src/test.rs"
- "#)
+ "#,
+ )
.file("src/test.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] cyclic package dependency: package `test v0.0.0 ([..])` depends on itself. Cycle:
-package `test v0.0.0 ([..]foo)`"));
+package `test v0.0.0 ([..]foo)`",
+ ),
+ );
}
#[test]
fn ignore_broken_symlinks() {
// windows and symlinks don't currently agree that well
- if cfg!(windows) { return }
+ if cfg!(windows) {
+ return;
+ }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("i am foo\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("i am foo\n"),
+ );
}
#[test]
fn missing_lib_and_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]Cargo.toml`
Caused by:
no targets specified in the manifest
- either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n"));
+ either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n",
+ ),
+ );
}
#[test]
fn lto_build() {
// FIXME: currently this hits a linker bug on 32-bit MSVC
if cfg!(all(target_env = "msvc", target_pointer_width = "32")) {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
[profile.release]
lto = true
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--release"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--release"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]main.rs --crate-type bin \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]release[/]deps`
[FINISHED] release [optimized] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url(),
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ )),
+ );
}
#[test]
fn verbose_build() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url(),
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ )),
+ );
}
#[test]
fn verbose_release_build() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--release"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--release"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]release[/]deps`
[FINISHED] release [optimized] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url(),
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ )),
+ );
}
#[test]
fn verbose_release_build_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
[dependencies.foo]
path = "foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
[lib]
name = "foo"
crate_type = ["dylib", "rlib"]
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--release"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--release"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({url}/foo)
[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \
--crate-type dylib --crate-type rlib \
--extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib`
[FINISHED] release [optimized] target(s) in [..]
",
- dir = p.root().display(),
- url = p.url(),
- prefix = env::consts::DLL_PREFIX,
- suffix = env::consts::DLL_SUFFIX)));
+ dir = p.root().display(),
+ url = p.url(),
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX
+ )),
+ );
}
#[test]
fn explicit_examples() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "world"
version = "1.0.0"
[[example]]
name = "goodbye"
path = "examples/ex-goodbye.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
- "#)
- .file("examples/ex-hello.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/ex-hello.rs",
+ r#"
extern crate world;
fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); }
- "#)
- .file("examples/ex-goodbye.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/ex-goodbye.rs",
+ r#"
extern crate world;
fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); }
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
- assert_that(process(&p.bin("examples/hello")),
- execs().with_status(0).with_stdout("Hello, World!\n"));
- assert_that(process(&p.bin("examples/goodbye")),
- execs().with_status(0).with_stdout("Goodbye, World!\n"));
+ assert_that(
+ process(&p.bin("examples/hello")),
+ execs().with_status(0).with_stdout("Hello, World!\n"),
+ );
+ assert_that(
+ process(&p.bin("examples/goodbye")),
+ execs().with_status(0).with_stdout("Goodbye, World!\n"),
+ );
}
#[test]
fn non_existing_example() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "world"
version = "1.0.0"
[[example]]
name = "hello"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ehlo.rs", "")
.build();
- assert_that(p.cargo("test").arg("-v"), execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- can't find `hello` example, specify example.path"));
+ can't find `hello` example, specify example.path",
+ ),
+ );
}
#[test]
fn non_existing_binary() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "world"
version = "1.0.0"
[[bin]]
name = "hello"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/ehlo.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"), execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- can't find `hello` bin, specify bin.path"));
+ can't find `hello` bin, specify bin.path",
+ ),
+ );
}
#[test]
fn legacy_binary_paths_warinigs() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "1.0.0"
[[bin]]
name = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[WARNING] path `[..]src[/]main.rs` was erroneously implicitly accepted for binary `bar`,
-please set bin.path in Cargo.toml"));
+please set bin.path in Cargo.toml",
+ ),
+ );
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "1.0.0"
[[bin]]
name = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[WARNING] path `[..]src[/]bin[/]main.rs` was erroneously implicitly accepted for binary `bar`,
-please set bin.path in Cargo.toml"));
+please set bin.path in Cargo.toml",
+ ),
+ );
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "1.0.0"
[[bin]]
name = "bar"
- "#)
+ "#,
+ )
.file("src/bar.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[WARNING] path `[..]src[/]bar.rs` was erroneously implicitly accepted for binary `bar`,
-please set bin.path in Cargo.toml"));
+please set bin.path in Cargo.toml",
+ ),
+ );
}
#[test]
fn implicit_examples() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "world"
version = "1.0.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
- "#)
- .file("examples/hello.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/hello.rs",
+ r#"
extern crate world;
fn main() {
println!("{}, {}!", world::get_hello(), world::get_world());
}
- "#)
- .file("examples/goodbye.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/goodbye.rs",
+ r#"
extern crate world;
fn main() {
println!("{}, {}!", world::get_goodbye(), world::get_world());
}
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("test"), execs().with_status(0));
- assert_that(process(&p.bin("examples/hello")),
- execs().with_status(0).with_stdout("Hello, World!\n"));
- assert_that(process(&p.bin("examples/goodbye")),
- execs().with_status(0).with_stdout("Goodbye, World!\n"));
+ assert_that(
+ process(&p.bin("examples/hello")),
+ execs().with_status(0).with_stdout("Hello, World!\n"),
+ );
+ assert_that(
+ process(&p.bin("examples/goodbye")),
+ execs().with_status(0).with_stdout("Goodbye, World!\n"),
+ );
}
#[test]
fn standard_build_no_ndebug() {
let p = project("world")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/foo.rs", r#"
+ .file(
+ "src/foo.rs",
+ r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
println!("fast")
}
}
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("slow\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("slow\n"),
+ );
}
#[test]
fn release_build_ndebug() {
let p = project("world")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/foo.rs", r#"
+ .file(
+ "src/foo.rs",
+ r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
println!("fast")
}
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0));
- assert_that(process(&p.release_bin("foo")),
- execs().with_status(0).with_stdout("fast\n"));
+ assert_that(p.cargo("build").arg("--release"), execs().with_status(0));
+ assert_that(
+ process(&p.release_bin("foo")),
+ execs().with_status(0).with_stdout("fast\n"),
+ );
}
#[test]
fn inferred_main_bin() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
#[test]
fn deletion_causes_failure() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- p.change_file("Cargo.toml", r#"
+ p.change_file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#);
+ "#,
+ );
assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
fn bad_cargo_toml_in_target_dir() {
let p = project("world")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("target/Cargo.toml", "bad-toml")
.build();
#[test]
fn lib_with_standard_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn simple_staticlib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[lib]
name = "foo"
crate-type = ["staticlib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
// env var is a test for #1381
- assert_that(p.cargo("build").env("RUST_LOG", "nekoneko=trace"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("RUST_LOG", "nekoneko=trace"),
+ execs().with_status(0),
+ );
}
#[test]
fn staticlib_rlib_and_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[lib]
name = "foo"
crate-type = ["staticlib", "rlib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
fn main() {
foo::foo();
- }"#)
+ }"#,
+ )
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
#[test]
fn opt_out_of_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
bin = []
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "bad syntax")
.build();
#[test]
fn single_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[lib]
name = "foo"
path = "src/bar.rs"
- "#)
+ "#,
+ )
.file("src/bar.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
#[test]
fn freshness_ignores_excluded() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
exclude = ["src/b*.rs"]
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
foo.root().move_into_the_past();
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url())));
+",
+ url = foo.url()
+ )),
+ );
// Smoke test to make sure it doesn't compile again
println!("first pass");
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stdout(""));
+ assert_that(foo.cargo("build"), execs().with_status(0).with_stdout(""));
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
File::create(&foo.root().join("src/bar.rs")).unwrap();
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stdout(""));
+ assert_that(foo.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn rebuild_preserves_out_dir() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = 'build.rs'
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::fs::File;
use std::path::Path;
File::create(&path).unwrap();
}
}
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
foo.root().move_into_the_past();
- assert_that(foo.cargo("build").env("FIRST", "1"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build").env("FIRST", "1"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url())));
+",
+ url = foo.url()
+ )),
+ );
File::create(&foo.root().join("src/bar.rs")).unwrap();
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url())));
+",
+ url = foo.url()
+ )),
+ );
}
#[test]
fn dep_no_libs() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "")
.build();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
}
#[test]
fn recompile_space_in_name() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[lib]
name = "foo"
path = "src/my lib.rs"
- "#)
+ "#,
+ )
.file("src/my lib.rs", "")
.build();
assert_that(foo.cargo("build"), execs().with_status(0));
foo.root().move_into_the_past();
- assert_that(foo.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(foo.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[cfg(unix)]
fn ignore_bad_directories() {
use std::os::unix::prelude::*;
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let dir = foo.root().join("tmp");
let mut perms = stat.permissions();
perms.set_mode(0o644);
fs::set_permissions(&dir, perms.clone()).unwrap();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
perms.set_mode(0o755);
fs::set_permissions(&dir, perms).unwrap();
}
#[test]
fn bad_cargo_config() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
this is not valid toml
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ foo.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Couldn't load Cargo configuration
Caused by:
Caused by:
expected an equals, found an identifier at line 2
-"));
+",
+ ),
+ );
}
#[test]
fn cargo_platform_specific_dependency() {
let host = rustc_host();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
build = {{ path = "build" }}
[target.{host}.dev-dependencies]
dev = {{ path = "dev" }}
- "#, host = host))
- .file("src/main.rs", r#"
+ "#,
+ host = host
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate dep;
fn main() { dep::dep() }
- "#)
- .file("tests/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
extern crate dev;
#[test]
fn foo() { dev::dev() }
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
extern crate build;
fn main() { build::build(); }
- "#)
- .file("dep/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "dep/Cargo.toml",
+ r#"
[project]
name = "dep"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.file("dep/src/lib.rs", "pub fn dep() {}")
- .file("build/Cargo.toml", r#"
+ .file(
+ "build/Cargo.toml",
+ r#"
[project]
name = "build"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.file("build/src/lib.rs", "pub fn build() {}")
- .file("dev/Cargo.toml", r#"
+ .file(
+ "dev/Cargo.toml",
+ r#"
[project]
name = "dev"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.file("dev/src/lib.rs", "pub fn dev() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
#[test]
fn bad_platform_specific_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[target.wrong-target.dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
extern crate baz;
pub fn gimme() -> String {
format!("")
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101));
+ assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
fn cargo_platform_specific_dependency_wrong_platform() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[target.non-existing-triplet.dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
invalid rust file, should not be compiled
- "#)
+ "#,
+ )
.build();
p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0));
+ assert_that(process(&p.bin("foo")), execs().with_status(0));
let loc = p.root().join("Cargo.lock");
let mut lockfile = String::new();
- File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap();
+ File::open(&loc)
+ .unwrap()
+ .read_to_string(&mut lockfile)
+ .unwrap();
assert!(lockfile.contains("bar"))
}
#[test]
fn example_as_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["lib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
#[test]
fn example_as_rlib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["rlib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
#[test]
fn example_as_dylib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["dylib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["proc-macro"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ex.rs", "#![feature(proc_macro)]")
.build();
#[test]
fn example_bin_same_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.build();
- p.cargo("test").arg("--no-run").arg("-v")
+ p.cargo("test")
+ .arg("--no-run")
+ .arg("-v")
.exec_with_output()
.unwrap();
// We expect a file of the form bin/foo-{metadata_hash}
assert_that(&p.bin("examples/foo"), existing_file());
- p.cargo("test").arg("--no-run").arg("-v")
- .exec_with_output()
- .unwrap();
+ p.cargo("test")
+ .arg("--no-run")
+ .arg("-v")
+ .exec_with_output()
+ .unwrap();
assert_that(&p.bin("foo"), is_not(existing_file()));
// We expect a file of the form bin/foo-{metadata_hash}
#[test]
fn compile_then_delete() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
sleep_ms(100);
}
fs::remove_file(&p.bin("foo")).unwrap();
- assert_that(p.cargo("run").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("run").arg("-v"), execs().with_status(0));
}
#[test]
fn transitive_dependencies_not_available() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.aaaaa]
path = "a"
- "#)
- .file("src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}")
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "extern crate bbbbb; extern crate aaaaa; fn main() {}",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "aaaaa"
version = "0.0.1"
[dependencies.bbbbb]
path = "../b"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "extern crate bbbbb;")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "bbbbb"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[..] can't find crate for `bbbbb`[..]
-"));
+",
+ ),
+ );
}
#[test]
fn cyclic_deps_rejected() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies.foo]
path = ".."
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
#[test]
fn predictable_filenames() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["dylib", "rlib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
- let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX,
- env::consts::DLL_SUFFIX);
- assert_that(&p.root().join("target/debug").join(dylib_name),
- existing_file());
+ let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+ assert_that(
+ &p.root().join("target/debug").join(dylib_name),
+ existing_file(),
+ );
}
#[test]
fn dashes_to_underscores() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo-bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
assert_that(&p.bin("foo-bar"), existing_file());
}
#[test]
fn dashes_in_crate_name_bad() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo-bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(101));
}
#[test]
fn rustc_env_var() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build")
- .env("RUSTC", "rustc-that-does-not-exist").arg("-v"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .env("RUSTC", "rustc-that-does-not-exist")
+ .arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..])
Caused by:
[..]
-"));
+",
+ ),
+ );
assert_that(&p.bin("a"), is_not(existing_file()));
}
#[test]
fn filtering() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--lib"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--lib"), execs().with_status(0));
assert_that(&p.bin("a"), is_not(existing_file()));
- assert_that(p.cargo("build").arg("--bin=a").arg("--example=a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--bin=a").arg("--example=a"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("a"), existing_file());
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/a"), existing_file());
#[test]
fn filtering_implicit_bins() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--bins"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--bins"), execs().with_status(0));
assert_that(&p.bin("a"), existing_file());
assert_that(&p.bin("b"), existing_file());
assert_that(&p.bin("examples/a"), is_not(existing_file()));
#[test]
fn filtering_implicit_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--examples"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--examples"), execs().with_status(0));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/a"), existing_file());
#[test]
fn ignore_dotfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/.a.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn ignore_dotdirs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/a.rs", "fn main() {}")
.file(".git/Cargo.toml", "")
.file(".pc/dummy-fix.patch/Cargo.toml", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn dotdir_root() {
let p = ProjectBuilder::new("foo", root().join(".foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/a.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
-
#[test]
fn custom_target_dir() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
- assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"),
- execs().with_status(0));
- assert_that(&p.root().join("foo/target/debug").join(&exe_name),
- existing_file());
- assert_that(&p.root().join("target/debug").join(&exe_name),
- is_not(existing_file()));
+ assert_that(
+ p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"),
+ execs().with_status(0),
+ );
+ assert_that(
+ &p.root().join("foo/target/debug").join(&exe_name),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(&exe_name),
+ is_not(existing_file()),
+ );
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(&p.root().join("foo/target/debug").join(&exe_name),
- existing_file());
- assert_that(&p.root().join("target/debug").join(&exe_name),
- existing_file());
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ &p.root().join("foo/target/debug").join(&exe_name),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(&exe_name),
+ existing_file(),
+ );
fs::create_dir(p.root().join(".cargo")).unwrap();
- File::create(p.root().join(".cargo/config")).unwrap().write_all(br#"
+ File::create(p.root().join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ br#"
[build]
target-dir = "foo/target"
- "#).unwrap();
- assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "bar/target"),
- execs().with_status(0));
- assert_that(&p.root().join("bar/target/debug").join(&exe_name),
- existing_file());
- assert_that(&p.root().join("foo/target/debug").join(&exe_name),
- existing_file());
- assert_that(&p.root().join("target/debug").join(&exe_name),
- existing_file());
+ "#,
+ )
+ .unwrap();
+ assert_that(
+ p.cargo("build").env("CARGO_TARGET_DIR", "bar/target"),
+ execs().with_status(0),
+ );
+ assert_that(
+ &p.root().join("bar/target/debug").join(&exe_name),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("foo/target/debug").join(&exe_name),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(&exe_name),
+ existing_file(),
+ );
}
#[test]
fn rustc_no_trans() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"),
+ execs().with_status(0),
+ );
}
#[test]
fn build_multiple_packages() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
- "#)
+ "#,
+ )
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[[bin]]
name = "d1"
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
[[bin]]
name = "d2"
doctest = false
- "#)
+ "#,
+ )
.file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
.build();
- assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("d2")
- .arg("-p").arg("foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .arg("-p")
+ .arg("d1")
+ .arg("-p")
+ .arg("d2")
+ .arg("-p")
+ .arg("foo"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("i am foo\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("i am foo\n"),
+ );
- let d1_path = &p.build_dir().join("debug")
- .join(format!("d1{}", env::consts::EXE_SUFFIX));
- let d2_path = &p.build_dir().join("debug")
- .join(format!("d2{}", env::consts::EXE_SUFFIX));
+ let d1_path = &p.build_dir()
+ .join("debug")
+ .join(format!("d1{}", env::consts::EXE_SUFFIX));
+ let d2_path = &p.build_dir()
+ .join("debug")
+ .join(format!("d2{}", env::consts::EXE_SUFFIX));
assert_that(d1_path, existing_file());
assert_that(process(d1_path), execs().with_status(0).with_stdout("d1"));
assert_that(d2_path, existing_file());
- assert_that(process(d2_path),
- execs().with_status(0).with_stdout("d2"));
+ assert_that(process(d2_path), execs().with_status(0).with_stdout("d2"));
}
#[test]
fn invalid_spec() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
- "#)
+ "#,
+ )
.file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[]))
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[[bin]]
name = "d1"
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.build();
- assert_that(p.cargo("build").arg("-p").arg("notAValidDep"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-p").arg("notAValidDep"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] package id specification `notAValidDep` matched no packages
-"));
+",
+ ),
+ );
- assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("-p")
+ .arg("d1")
+ .arg("-p")
+ .arg("notAValidDep"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] package id specification `notAValidDep` matched no packages
-"));
+",
+ ),
+ );
}
#[test]
fn manifest_with_bom_is_ok() {
let p = project("foo")
- .file("Cargo.toml", "\u{FEFF}
+ .file(
+ "Cargo.toml",
+ "\u{FEFF}
[package]
name = \"foo\"
version = \"0.0.1\"
authors = []
- ")
+ ",
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn panic_abort_compiles_with_panic_abort() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[profile.dev]
panic = 'abort'
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..] -C panic=abort [..]"));
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] -C panic=abort [..]"),
+ );
}
#[test]
fn explicit_color_config_is_propagated_to_rustc() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--color").arg("always"),
- execs().with_status(0).with_stderr_contains(
- "[..]rustc [..] src[/]lib.rs --color always[..]"));
+ assert_that(
+ p.cargo("build").arg("-v").arg("--color").arg("always"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..]rustc [..] src[/]lib.rs --color always[..]"),
+ );
assert_that(p.cargo("clean"), execs().with_status(0));
- assert_that(p.cargo("build").arg("-v").arg("--color").arg("never"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--color").arg("never"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] test v0.0.0 ([..])
[RUNNING] `rustc [..] --color never [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn compiler_json_error_format() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() { let unused = 92; }")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", r#"fn dead() {}"#)
.build();
- assert_that(p.cargo("build").arg("-v")
- .arg("--message-format").arg("json"),
- execs().with_status(0).with_json(r#"
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .arg("--message-format")
+ .arg("json"),
+ execs().with_status(0).with_json(
+ r#"
{
"reason":"compiler-message",
"package_id":"bar 0.5.0 ([..])",
"filenames": "{...}",
"fresh": false
}
-"#));
+"#,
+ ),
+ );
// With fresh build, we should repeat the artifacts,
// but omit compiler warnings.
- assert_that(p.cargo("build").arg("-v")
- .arg("--message-format").arg("json"),
- execs().with_status(0).with_json(r#"
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .arg("--message-format")
+ .arg("json"),
+ execs().with_status(0).with_json(
+ r#"
{
"reason":"compiler-artifact",
"profile": {
"filenames": "{...}",
"fresh": true
}
-"#));
+"#,
+ ),
+ );
}
#[test]
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--message-format").arg("XML"),
- execs().with_status(1)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("--message-format").arg("XML"),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: 'XML' isn't a valid value for '--message-format <FMT>'
<tab>[possible values: human, json]
-"));
+",
+ ),
+ );
}
#[test]
.file("src/main.rs", "fn main() { let unused = 0; }")
.build();
- assert_that(p.cargo("rustc").arg("--release").arg("--bin").arg("foo")
- .arg("--message-format").arg("JSON"),
- execs().with_status(0)
- .with_json(r#"
+ assert_that(
+ p.cargo("rustc")
+ .arg("--release")
+ .arg("--bin")
+ .arg("foo")
+ .arg("--message-format")
+ .arg("JSON"),
+ execs().with_status(0).with_json(
+ r#"
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"filenames": "{...}",
"fresh": false
}
-"#));
+"#,
+ ),
+ );
}
#[test]
fn no_warn_about_package_metadata() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[package.metadata.another]
bar = 3
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("[..] foo v0.0.1 ([..])\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "[..] foo v0.0.1 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
}
#[test]
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--target").arg(""),
- execs().with_status(101)
- .with_stderr_contains("[..] target was empty"));
+ assert_that(
+ p.cargo("build").arg("--target").arg(""),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..] target was empty"),
+ );
}
#[test]
fn build_all_workspace() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr("[..] Compiling bar v0.1.0 ([..])\n\
- [..] Compiling foo v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build").arg("--all"),
+ execs().with_status(0).with_stderr(
+ "[..] Compiling bar v0.1.0 ([..])\n\
+ [..] Compiling foo v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
}
#[test]
fn build_all_exclude() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
- "#)
- .file("baz/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/lib.rs",
+ r#"
pub fn baz() {
break_the_build();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--all")
- .arg("--exclude")
- .arg("baz"),
- execs().with_status(0)
- .with_stderr_contains("[..]Compiling foo v0.1.0 [..]")
- .with_stderr_contains("[..]Compiling bar v0.1.0 [..]")
- .with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]"));
+ assert_that(
+ p.cargo("build").arg("--all").arg("--exclude").arg("baz"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..]Compiling foo v0.1.0 [..]")
+ .with_stderr_contains("[..]Compiling bar v0.1.0 [..]")
+ .with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]"),
+ );
}
#[test]
fn build_all_workspace_implicit_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/c.rs", "fn main() {}")
.file("examples/d.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.file("bar/examples/h.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build")
- .arg("--all").arg("--examples"),
- execs().with_status(0)
- .with_stderr("[..] Compiling bar v0.1.0 ([..])\n\
- [..] Compiling foo v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build").arg("--all").arg("--examples"),
+ execs().with_status(0).with_stderr(
+ "[..] Compiling bar v0.1.0 ([..])\n\
+ [..] Compiling foo v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/c"), existing_file());
#[test]
fn build_all_virtual_manifest() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
// The order in which foo and bar are built is not guaranteed
- assert_that(p.cargo("build")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
- .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
- [..] Compiling [..] v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
+ .with_stderr(
+ "[..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
}
#[test]
fn build_virtual_manifest_all_implied() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
// The order in which foo and bar are built is not guaranteed
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
- .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
- [..] Compiling [..] v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
+ .with_stderr(
+ "[..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
}
#[test]
fn build_virtual_manifest_one_project() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build")
- .arg("-p").arg("foo"),
- execs().with_status(0)
- .with_stderr_does_not_contain("bar")
- .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
- .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build").arg("-p").arg("foo"),
+ execs()
+ .with_status(0)
+ .with_stderr_does_not_contain("bar")
+ .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
+ .with_stderr(
+ "[..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
}
#[test]
fn build_all_virtual_manifest_implicit_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.file("foo/src/bin/a.rs", "fn main() {}")
.file("foo/src/bin/b.rs", "fn main() {}")
.file("foo/examples/c.rs", "fn main() {}")
.file("foo/examples/d.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.build();
// The order in which foo and bar are built is not guaranteed
- assert_that(p.cargo("build")
- .arg("--all").arg("--examples"),
- execs().with_status(0)
- .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
- .with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
- [..] Compiling [..] v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build").arg("--all").arg("--examples"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
+ .with_stderr(
+ "[..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Compiling [..] v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/c"), existing_file());
#[test]
fn build_all_member_dependency_same_name() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
[dependencies]
a = "0.1.0"
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
pub fn a() {}
- "#)
+ "#,
+ )
.build();
Package::new("a", "0.1.0").publish();
- assert_that(p.cargo("build")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr("[..] Updating registry `[..]`\n\
- [..] Downloading a v0.1.0 ([..])\n\
- [..] Compiling a v0.1.0\n\
- [..] Compiling a v0.1.0 ([..])\n\
- [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
+ assert_that(
+ p.cargo("build").arg("--all"),
+ execs().with_status(0).with_stderr(
+ "[..] Updating registry `[..]`\n\
+ [..] Downloading a v0.1.0 ([..])\n\
+ [..] Compiling a v0.1.0\n\
+ [..] Compiling a v0.1.0 ([..])\n\
+ [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+ ),
+ );
}
#[test]
fn run_proper_binary() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
name = "main"
[[bin]]
name = "other"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("src/bin/main.rs", r#"
+ .file(
+ "src/bin/main.rs",
+ r#"
fn main() {
panic!("This should never be run.");
}
- "#)
- .file("src/bin/other.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/other.rs",
+ r#"
fn main() {
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bin").arg("other"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("--bin").arg("other"),
+ execs().with_status(0),
+ );
}
#[test]
fn run_proper_binary_main_rs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("src/bin/main.rs", r#"
+ .file(
+ "src/bin/main.rs",
+ r#"
fn main() {
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bin").arg("foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("--bin").arg("foo"),
+ execs().with_status(0),
+ );
}
#[test]
fn run_proper_alias_binary_from_src() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
name = "foo"
[[bin]]
name = "bar"
- "#)
- .file("src/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "src/foo.rs",
+ r#"
fn main() {
println!("foo");
}
- "#).file("src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bar.rs",
+ r#"
fn main() {
println!("bar");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--all"),
- execs().with_status(0)
- );
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("foo\n"));
- assert_that(process(&p.bin("bar")),
- execs().with_status(0).with_stdout("bar\n"));
+ assert_that(p.cargo("build").arg("--all"), execs().with_status(0));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("foo\n"),
+ );
+ assert_that(
+ process(&p.bin("bar")),
+ execs().with_status(0).with_stdout("bar\n"),
+ );
}
#[test]
fn run_proper_alias_binary_main_rs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
name = "foo"
[[bin]]
name = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
println!("main");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--all"),
- execs().with_status(0)
- );
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("main\n"));
- assert_that(process(&p.bin("bar")),
- execs().with_status(0).with_stdout("main\n"));
+ assert_that(p.cargo("build").arg("--all"), execs().with_status(0));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("main\n"),
+ );
+ assert_that(
+ process(&p.bin("bar")),
+ execs().with_status(0).with_stdout("main\n"),
+ );
}
#[test]
fn run_proper_binary_main_rs_as_foo() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
- "#)
- .file("src/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "src/foo.rs",
+ r#"
fn main() {
panic!("This should never be run.");
}
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bin").arg("foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("--bin").arg("foo"),
+ execs().with_status(0),
+ );
}
#[test]
fn rustc_wrapper() {
// We don't have /usr/bin/env on Windows.
- if cfg!(windows) { return }
+ if cfg!(windows) {
+ return;
+ }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("build").arg("-v").env("RUSTC_WRAPPER", "/usr/bin/env"),
- execs().with_stderr_contains(
- "[RUNNING] `/usr/bin/env rustc --crate-name foo [..]")
- .with_status(0));
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .env("RUSTC_WRAPPER", "/usr/bin/env"),
+ execs()
+ .with_stderr_contains("[RUNNING] `/usr/bin/env rustc --crate-name foo [..]")
+ .with_status(0),
+ );
}
#[test]
fn cdylib_not_lifted() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
[lib]
crate-type = ["cdylib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
for file in files {
println!("checking: {}", file);
- assert_that(&p.root().join("target/debug/deps").join(&file),
- existing_file());
+ assert_that(
+ &p.root().join("target/debug/deps").join(&file),
+ existing_file(),
+ );
}
}
#[test]
fn cdylib_final_outputs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo-bar"
authors = []
[lib]
crate-type = ["cdylib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
// This bug is non-deterministic
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
f_b = []
f_c = []
f_d = []
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=cfg_a");
println!("cargo:rustc-cfg=cfg_b");
println!("cargo:rustc-cfg=cfg_d");
println!("cargo:rustc-cfg=cfg_e");
}
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0 [..]
[RUNNING] [..]
[RUNNING] [..]
--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
--cfg[..]f_c[..]--cfg[..]f_d[..] \
--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e`
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"));
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ ),
+ );
}
#[test]
fn explicit_bins_without_paths() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[bin]]
name = "bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
#[test]
fn no_bin_in_src_with_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[bin]]
name = "foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/foo.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- can't find `foo` bin, specify bin.path"));
+ can't find `foo` bin, specify bin.path",
+ ),
+ );
}
-
#[test]
fn inferred_bins() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.file("src/bin/baz/main.rs", "fn main() {}")
fn inferred_bins_duplicate_name() {
// this should fail, because we have two binaries with the same name
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("src/bin/foo.rs", "fn main() {}")
.file("src/bin/foo/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[..]found duplicate binary name foo, but all binary targets must have a unique name[..]
-"));
+",
+ ),
+ );
}
#[test]
fn inferred_bin_path() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[bin]]
name = "bar"
# Note, no `path` key!
- "#)
+ "#,
+ )
.file("src/bin/bar/main.rs", "fn main() {}")
.build();
#[test]
fn inferred_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "fn main() {}")
.file("examples/bar.rs", "fn main() {}")
.file("examples/baz/main.rs", "fn main() {}")
#[test]
fn inferred_tests() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "fn main() {}")
.file("tests/bar.rs", "fn main() {}")
.file("tests/baz/main.rs", "fn main() {}")
assert_that(
p.cargo("test").arg("--test=bar").arg("--test=baz"),
- execs().with_status(0));
+ execs().with_status(0),
+ );
}
#[test]
fn inferred_benchmarks() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "fn main() {}")
.file("benches/bar.rs", "fn main() {}")
.file("benches/baz/main.rs", "fn main() {}")
assert_that(
p.cargo("bench").arg("--bench=bar").arg("--bench=baz"),
- execs().with_status(0));
+ execs().with_status(0),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
let output = t!(String::from_utf8(
- t!(p.cargo("build").arg("-v").exec_with_output())
- .stderr,
+ t!(p.cargo("build").arg("-v").exec_with_output()).stderr,
));
let metadata = output
.split_whitespace()
assert_that(
p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr_contains(
- format!("[..]{}[..]", metadata),
- ),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(format!("[..]{}[..]", metadata)),
);
}
#[test]
fn building_a_dependent_crate_witout_bin_should_fail() {
Package::new("testless", "0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "testless"
version = "0.1.0"
[[bin]]
name = "a_bin"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
testless = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains(
- "[..]can't find `a_bin` bin, specify bin.path"
- ));
+ assert_that(
+ p.cargo("build"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..]can't find `a_bin` bin, specify bin.path"),
+ );
}
#[test]
fn uplift_dsym_of_bin_on_mac() {
if !cfg!(any(target_os = "macos", target_os = "ios")) {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() { panic!(); }")
.file("src/bin/b.rs", "fn main() { panic!(); }")
.file("examples/c.rs", "fn main() { panic!(); }")
.build();
assert_that(
- p.cargo("build").arg("--bins").arg("--examples").arg("--tests"),
- execs().with_status(0)
+ p.cargo("build")
+ .arg("--bins")
+ .arg("--examples")
+ .arg("--tests"),
+ execs().with_status(0),
);
assert_that(&p.bin("foo.dSYM"), existing_dir());
assert_that(&p.bin("b.dSYM"), existing_dir());
#[test]
fn uplift_pdb_of_bin_on_windows() {
if !cfg!(all(target_os = "windows", target_env = "msvc")) {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() { panic!(); }")
.file("src/bin/b.rs", "fn main() { panic!(); }")
.file("examples/c.rs", "fn main() { panic!(); }")
.build();
assert_that(
- p.cargo("build").arg("--bins").arg("--examples").arg("--tests"),
- execs().with_status(0)
+ p.cargo("build")
+ .arg("--bins")
+ .arg("--examples")
+ .arg("--tests"),
+ execs().with_status(0),
);
assert_that(&p.target_debug_dir().join("foo.pdb"), existing_file());
assert_that(&p.target_debug_dir().join("b.pdb"), existing_file());
#[test]
fn build_filter_infer_profile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
- --emit=dep-info,link[..]")
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
- --emit=dep-info,link[..]")
- );
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
+ --emit=dep-info,link[..]",
+ )
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
+ --emit=dep-info,link[..]",
+ ),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("build").arg("-v").arg("--test=t1"),
- execs().with_status(0)
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
- --emit=dep-info,link[..]")
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name t1 tests[/]t1.rs --emit=dep-info,link[..]")
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
- --emit=dep-info,link[..]")
- );
+ assert_that(
+ p.cargo("build").arg("-v").arg("--test=t1"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
+ --emit=dep-info,link[..]",
+ )
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name t1 tests[/]t1.rs --emit=dep-info,link[..]",
+ )
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
+ --emit=dep-info,link[..]",
+ ),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("build").arg("-v").arg("--bench=b1"),
- execs().with_status(0)
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
- --emit=dep-info,link[..]")
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name b1 benches[/]b1.rs --emit=dep-info,link \
- -C opt-level=3[..]")
- .with_stderr_contains("\
- [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
- --emit=dep-info,link[..]")
- );
+ assert_that(
+ p.cargo("build").arg("-v").arg("--bench=b1"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
+ --emit=dep-info,link[..]",
+ )
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name b1 benches[/]b1.rs --emit=dep-info,link \
+ -C opt-level=3[..]",
+ )
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
+ --emit=dep-info,link[..]",
+ ),
+ );
}
#[test]
fn all_targets_no_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--all-targets"),
+ assert_that(
+ p.cargo("build").arg("-v").arg("--all-targets"),
execs().with_status(0)
// bin
.with_stderr_contains("\
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
- -C debuginfo=2 --test [..]")
- );
+ -C debuginfo=2 --test [..]"),
+ );
}
#[test]
fn no_linkable_target() {
// Issue 3169. This is currently not an error as per discussion in PR #4797
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
the_lib = { path = "the_lib" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("the_lib/Cargo.toml", r#"
+ .file(
+ "the_lib/Cargo.toml",
+ r#"
[package]
name = "the_lib"
version = "0.1.0"
[lib]
name = "the_lib"
crate-type = ["staticlib"]
- "#)
+ "#,
+ )
.file("the_lib/src/lib.rs", "pub fn foo() {}")
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_status(0)
- .with_stderr_contains("\
- [WARNING] The package `the_lib` provides no linkable [..] \
-while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]"));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr_contains(
+ "\
+ [WARNING] The package `the_lib` provides no linkable [..] \
+ while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]",
+ ),
+ );
}
#[test]
fn avoid_dev_deps() {
Package::new("foo", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dev-dependencies]
baz = "1.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
// --bins is needed because of #5134
- assert_that(p.cargo("build").arg("--bins"),
- execs().with_status(101));
- assert_that(p.cargo("build").arg("--bins")
- .masquerade_as_nightly_cargo()
- .arg("-Zavoid-dev-deps"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--bins"), execs().with_status(101));
+ assert_that(
+ p.cargo("build")
+ .arg("--bins")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zavoid-dev-deps"),
+ execs().with_status(0),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("build").arg("--jobs").arg("over9000"),
- execs().with_status(1).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--jobs").arg("over9000"),
+ execs().with_status(1).with_stderr(
+ "\
error: Invalid value: could not parse `over9000` as a number
-"));
+",
+ ),
+ );
}
use git2;
use bufstream::BufStream;
use cargotest::support::paths;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
// Test that HTTP auth is offered from `credential.helper`
fn headers(rdr: &mut BufRead) -> HashSet<String> {
let valid = ["GET", "Authorization", "Accept", "User-Agent"];
- rdr.lines().map(|s| s.unwrap())
- .take_while(|s| s.len() > 2)
- .map(|s| s.trim().to_string())
- .filter(|s| {
- valid.iter().any(|prefix| s.starts_with(*prefix))
- })
- .collect()
+ rdr.lines()
+ .map(|s| s.unwrap())
+ .take_while(|s| s.len() > 2)
+ .map(|s| s.trim().to_string())
+ .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix)))
+ .collect()
}
- let t = thread::spawn(move|| {
+ let t = thread::spawn(move || {
let mut conn = BufStream::new(server.accept().unwrap().0);
let req = headers(&mut conn);
let user_agent = "User-Agent: git/2.0 (libgit2 0.27.0)";
- conn.write_all(b"\
+ conn.write_all(
+ b"\
HTTP/1.1 401 Unauthorized\r\n\
WWW-Authenticate: Basic realm=\"wheee\"\r\n
\r\n\
- ").unwrap();
- assert_eq!(req, vec![
- "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
- "Accept: */*",
- user_agent,
- ].into_iter().map(|s| s.to_string()).collect());
+ ",
+ ).unwrap();
+ assert_eq!(
+ req,
+ vec![
+ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+ "Accept: */*",
+ user_agent,
+ ].into_iter()
+ .map(|s| s.to_string())
+ .collect()
+ );
drop(conn);
let mut conn = BufStream::new(server.accept().unwrap().0);
let req = headers(&mut conn);
- conn.write_all(b"\
+ conn.write_all(
+ b"\
HTTP/1.1 401 Unauthorized\r\n\
WWW-Authenticate: Basic realm=\"wheee\"\r\n
\r\n\
- ").unwrap();
- assert_eq!(req, vec![
- "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
- "Authorization: Basic Zm9vOmJhcg==",
- "Accept: */*",
- user_agent,
- ].into_iter().map(|s| s.to_string()).collect());
+ ",
+ ).unwrap();
+ assert_eq!(
+ req,
+ vec![
+ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+ "Authorization: Basic Zm9vOmJhcg==",
+ "Accept: */*",
+ user_agent,
+ ].into_iter()
+ .map(|s| s.to_string())
+ .collect()
+ );
});
let script = project("script")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "script"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
println!("username=foo");
println!("password=bar");
}
- "#)
+ "#,
+ )
.build();
- assert_that(script.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(script.cargo("build").arg("-v"), execs().with_status(0));
let script = script.bin("script");
let config = paths::home().join(".gitconfig");
let mut config = git2::Config::open(&config).unwrap();
- config.set_str("credential.helper",
- &script.display().to_string()).unwrap();
+ config
+ .set_str("credential.helper", &script.display().to_string())
+ .unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
git = "http://127.0.0.1:{}/foo/bar"
- "#, addr.port()))
+ "#,
+ addr.port()
+ ),
+ )
.file("src/main.rs", "")
- .file(".cargo/config","\
+ .file(
+ ".cargo/config",
+ "\
[net]
retry = 0
- ")
+ ",
+ )
.build();
// This is a "contains" check because the last error differs by platform,
// may span multiple lines, and isn't relevant to this test.
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(&format!(
+ "\
[UPDATING] git repository `http://{addr}/foo/bar`
[ERROR] failed to load source for a dependency on `bar`
Caused by:
",
- addr = addr)));
+ addr = addr
+ )),
+ );
t.join().ok().unwrap();
}
fn https_something_happens() {
let server = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = server.local_addr().unwrap();
- let t = thread::spawn(move|| {
+ let t = thread::spawn(move || {
let mut conn = server.accept().unwrap().0;
drop(conn.write(b"1234"));
drop(conn.shutdown(std::net::Shutdown::Write));
});
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
git = "https://127.0.0.1:{}/foo/bar"
- "#, addr.port()))
+ "#,
+ addr.port()
+ ),
+ )
.file("src/main.rs", "")
- .file(".cargo/config","\
+ .file(
+ ".cargo/config",
+ "\
[net]
retry = 0
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "\
[UPDATING] git repository `https://{addr}/foo/bar`
-", addr = addr))
- .with_stderr_contains(&format!("\
+",
+ addr = addr
+ ))
+ .with_stderr_contains(&format!(
+ "\
Caused by:
{errmsg}
",
- errmsg = if cfg!(windows) {
- "[..]failed to send request: [..]"
- } else if cfg!(target_os = "macos") {
- // OSX is difficult to tests as some builds may use
- // Security.framework and others may use OpenSSL. In that case let's
- // just not verify the error message here.
- "[..]"
- } else {
- "[..]SSL error: [..]"
- })));
+ errmsg = if cfg!(windows) {
+ "[..]failed to send request: [..]"
+ } else if cfg!(target_os = "macos") {
+                    // OSX is difficult to test as some builds may use
+ // Security.framework and others may use OpenSSL. In that case let's
+ // just not verify the error message here.
+ "[..]"
+ } else {
+ "[..]SSL error: [..]"
+ }
+ )),
+ );
t.join().ok().unwrap();
}
fn ssh_something_happens() {
let server = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = server.local_addr().unwrap();
- let t = thread::spawn(move|| {
+ let t = thread::spawn(move || {
drop(server.accept().unwrap());
});
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
git = "ssh://127.0.0.1:{}/foo/bar"
- "#, addr.port()))
+ "#,
+ addr.port()
+ ),
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "\
[UPDATING] git repository `ssh://{addr}/foo/bar`
-", addr = addr))
- .with_stderr_contains("\
+",
+ addr = addr
+ ))
+ .with_stderr_contains(
+ "\
Caused by:
[..]failed to start SSH session: Failed getting banner[..]
-"));
+",
+ ),
+ );
t.join().ok().unwrap();
}
use cargotest::support::{basic_bin_manifest, execs, project, Project};
-use hamcrest::{assert_that};
+use hamcrest::assert_that;
fn verbose_output_for_lib(p: &Project) -> String {
- format!("\
+ format!(
+ "\
[COMPILING] {name} v{version} ({url})
[RUNNING] `rustc --crate-name {name} src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.root().display(), url = p.url(),
- name = "foo", version = "0.0.1")
+ dir = p.root().display(),
+ url = p.url(),
+ name = "foo",
+ version = "0.0.1"
+ )
}
#[test]
fn build_lib_only() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = ["wycats@example.com"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("build").arg("--lib").arg("-v"),
- execs()
- .with_status(0)
- .with_stderr(verbose_output_for_lib(&p)));
+ assert_that(
+ p.cargo("build").arg("--lib").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr(verbose_output_for_lib(&p)),
+ );
}
-
#[test]
fn build_with_no_lib() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--lib"),
- execs().with_status(101)
- .with_stderr("[ERROR] no library targets found"));
+ assert_that(
+ p.cargo("build").arg("--lib"),
+ execs()
+ .with_status(101)
+ .with_stderr("[ERROR] no library targets found"),
+ );
}
#[test]
fn build_with_relative_cargo_home_path() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
[dependencies]
"test-dependency" = { path = "src/test_dependency" }
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/test_dependency/src/lib.rs", r#" "#)
- .file("src/test_dependency/Cargo.toml", r#"
+ .file(
+ "src/test_dependency/Cargo.toml",
+ r#"
[package]
name = "test-dependency"
version = "0.0.1"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").env("CARGO_HOME", "./cargo_home/"),
- execs()
- .with_status(0));
+ assert_that(
+ p.cargo("build").env("CARGO_HOME", "./cargo_home/"),
+ execs().with_status(0),
+ );
}
use std::path::PathBuf;
use cargotest::{rustc_host, sleep_ms};
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use cargotest::support::paths::CargoPathExt;
use cargotest::support::registry::Package;
-use hamcrest::{assert_that, existing_file, existing_dir};
+use hamcrest::{assert_that, existing_dir, existing_file};
#[test]
fn custom_build_script_failed() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
build = "build.rs"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
std::process::exit(101);
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(&format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin [..]`
[RUNNING] `[..][/]build-script-build`
[ERROR] failed to run custom build command for `foo v0.5.0 ({url})`
process didn't exit successfully: `[..][/]build-script-build` (exit code: 101)",
-url = p.url())));
+ url = p.url()
+ )),
+ );
}
#[test]
fn custom_build_env_vars() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[features]
foo = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn hello() {}
- "#);
+ "#,
+ );
- let file_content = format!(r#"
+ let file_content = format!(
+ r#"
use std::env;
use std::io::prelude::*;
use std::path::Path;
assert_eq!(rustdoc, "rustdoc");
}}
"#,
- p.root().join("target").join("debug").join("build").display());
+ p.root()
+ .join("target")
+ .join("debug")
+ .join("build")
+ .display()
+ );
let p = p.file("bar/build.rs", &file_content).build();
- assert_that(p.cargo("build").arg("--features").arg("bar_feat"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--features").arg("bar_feat"),
+ execs().with_status(0),
+ );
}
#[test]
fn custom_build_script_wrong_rustc_flags() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
build = "build.rs"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-aaa -bbb");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains(&format!("\
-[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ({})`: \
-`-aaa -bbb`",
-p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(&format!(
+ "\
+ [ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ({})`: \
+ `-aaa -bbb`",
+ p.url()
+ )),
+ );
}
/*
#[test]
fn links_no_build_cmd() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
links = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] package `foo v0.5.0 (file://[..])` specifies that it links to `a` but does \
not have a custom build script
-"));
+",
+ ),
+ );
}
#[test]
fn links_duplicates() {
// this tests that the links_duplicates are caught at resolver time
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.a-sys]
path = "a-sys"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("build.rs", "")
- .file("a-sys/Cargo.toml", r#"
+ .file(
+ "a-sys/Cargo.toml",
+ r#"
[project]
name = "a-sys"
version = "0.5.0"
authors = []
links = "a"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a-sys/src/lib.rs", "")
.file("a-sys/build.rs", "")
.build();
fn links_duplicates_deep_dependency() {
// this tests that the links_duplicates are caught at resolver time
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("build.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
[dependencies.a-sys]
path = "a-sys"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.file("a/build.rs", "")
- .file("a/a-sys/Cargo.toml", r#"
+ .file(
+ "a/a-sys/Cargo.toml",
+ r#"
[project]
name = "a-sys"
version = "0.5.0"
authors = []
links = "a"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/a-sys/src/lib.rs", "")
.file("a/a-sys/build.rs", "")
.build();
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"),
assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"),
"baz");
}
- "#)
- .file(".cargo/config", &format!(r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.foo]
rustc-flags = "-L foo -L bar"
foo = "bar"
bar = "baz"
- "#, target))
- .file("a/Cargo.toml", r#"
+ "#,
+ target
+ ),
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.file("a/build.rs", "not valid rust code")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[..]
[..]
[..]
[..]
[RUNNING] `rustc --crate-name foo [..] -L foo -L bar`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("build.rs", "fn main() {}")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.foo]
rustc-flags = "-L foo -L bar"
foo = "bar"
bar = "baz"
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn links_passes_env_vars() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
}
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", r#"
+ .file(
+ "a/build.rs",
+ r#"
use std::env;
fn main() {
let lib = env::var("CARGO_MANIFEST_LINKS").unwrap();
println!("cargo:foo=bar");
println!("cargo:bar=baz");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn only_rerun_build_script() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
p.root().move_into_the_past();
File::create(&p.root().join("some-new-file")).unwrap();
p.root().move_into_the_past();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc --crate-name foo [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn rebuild_continues_to_pass_env_vars() {
let a = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::time::Duration;
fn main() {
println!("cargo:foo=bar");
println!("cargo:bar=baz");
std::thread::sleep(Duration::from_millis(500));
}
- "#)
+ "#,
+ )
.build();
a.root().move_into_the_past();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.a]
path = '{}'
- "#, a.root().display()))
+ "#,
+ a.root().display()
+ ),
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
p.root().move_into_the_past();
File::create(&p.root().join("some-new-file")).unwrap();
p.root().move_into_the_past();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn testing_and_such() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
println!("build");
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
p.root().move_into_the_past();
File::create(&p.root().join("src/lib.rs")).unwrap();
p.root().move_into_the_past();
println!("test");
- assert_that(p.cargo("test").arg("-vj1"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test").arg("-vj1"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc --crate-name foo [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..][/]foo-[..][EXE]`
[DOCTEST] foo
-[RUNNING] `rustdoc --test [..]`")
- .with_stdout_contains_n("running 0 tests", 2));
+[RUNNING] `rustdoc --test [..]`",
+ )
+ .with_stdout_contains_n("running 0 tests", 2),
+ );
println!("doc");
- assert_that(p.cargo("doc").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("doc").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[DOCUMENTING] foo v0.5.0 (file://[..])
[RUNNING] `rustdoc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- File::create(&p.root().join("src/main.rs")).unwrap()
- .write_all(b"fn main() {}").unwrap();
+",
+ ),
+ );
+
+ File::create(&p.root().join("src/main.rs"))
+ .unwrap()
+ .write_all(b"fn main() {}")
+ .unwrap();
println!("run");
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("run"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
-"));
+",
+ ),
+ );
}
#[test]
fn propagation_of_l_flags() {
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
[dependencies.b]
path = "../b"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", r#"
+ .file(
+ "a/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-L bar");
}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.file("b/build.rs", "bad file")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.foo]
rustc-flags = "-L foo"
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("-v").arg("-j1"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("-j1"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
-"));
+",
+ ),
+ );
}
#[test]
fn propagation_of_l_flags_new() {
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
[dependencies.b]
path = "../b"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", r#"
+ .file(
+ "a/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=bar");
}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.file("b/build.rs", "bad file")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.foo]
rustc-link-search = ["foo"]
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("-v").arg("-j1"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("-j1"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
-"));
+",
+ ),
+ );
}
#[test]
fn build_deps_simple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
build = "build.rs"
[build-dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", "
+ .file(
+ "build.rs",
+ "
#[allow(unused_extern_crates)]
extern crate a;
fn main() {}
- ")
- .file("a/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.5.0 (file://[..])
[RUNNING] `rustc --crate-name a [..]`
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `[..][/]foo-[..][/]build-script-build`
[RUNNING] `rustc --crate-name foo [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn build_deps_not_for_normal() {
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
build = "build.rs"
[build-dependencies.aaaaa]
path = "a"
- "#)
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate aaaaa;")
- .file("build.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate aaaaa;",
+ )
+ .file(
+ "build.rs",
+ "
#[allow(unused_extern_crates)]
extern crate aaaaa;
fn main() {}
- ")
- .file("a/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "aaaaa"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--target").arg(&target),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(
+ "\
[..]can't find crate for `aaaaa`[..]
-")
- .with_stderr_contains("\
+",
+ )
+ .with_stderr_contains(
+ "\
[ERROR] Could not compile `foo`.
Caused by:
process didn't exit successfully: [..]
-"));
+",
+ ),
+ );
}
#[test]
fn build_cmd_with_a_build_cmd() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[build-dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", "
+ .file(
+ "build.rs",
+ "
#[allow(unused_extern_crates)]
extern crate a;
fn main() {}
- ")
- .file("a/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
[build-dependencies.b]
path = "../b"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", "#[allow(unused_extern_crates)] extern crate b; fn main() {}")
- .file("b/Cargo.toml", r#"
+ .file(
+ "a/build.rs",
+ "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] b v0.5.0 (file://[..])
[RUNNING] `rustc --crate-name b [..]`
[COMPILING] a v0.5.0 (file://[..])
--out-dir [..] \
-L [..]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn out_dir_is_preserved() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::fs::File;
use std::path::Path;
let out = env::var("OUT_DIR").unwrap();
File::create(Path::new(&out).join("foo")).unwrap();
}
- "#)
+ "#,
+ )
.build();
// Make the file
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
p.root().move_into_the_past();
// Change to asserting that it's there
- File::create(&p.root().join("build.rs")).unwrap().write_all(br#"
+ File::create(&p.root().join("build.rs"))
+ .unwrap()
+ .write_all(
+ br#"
use std::env;
use std::old_io::File;
fn main() {
let out = env::var("OUT_DIR").unwrap();
File::open(&Path::new(&out).join("foo")).unwrap();
}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
p.root().move_into_the_past();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
// Run a fresh build where file should be preserved
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
// One last time to make sure it's still there.
File::create(&p.root().join("foo")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn output_separate_lines() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-L foo");
println!("cargo:rustc-flags=-l static=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `[..][/]foo-[..][/]build-script-build`
[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
[ERROR] could not find native static library [..]
-"));
+",
+ ),
+ );
}
#[test]
fn output_separate_lines_new() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=foo");
println!("cargo:rustc-link-lib=static=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[COMPILING] foo v0.5.0 (file://[..])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `[..][/]foo-[..][/]build-script-build`
[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
[ERROR] could not find native static library [..]
-"));
+",
+ ),
+ );
}
#[cfg(not(windows))] // FIXME(#867)
#[test]
fn code_generation() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
include!(concat!(env!("OUT_DIR"), "/hello.rs"));
fn main() {
println!("{}", message());
}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::fs::File;
use std::io::prelude::*;
}
").unwrap();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.5.0 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `target[/]debug[/]foo`")
- .with_stdout("\
+[RUNNING] `target[/]debug[/]foo`",
+ )
+ .with_stdout(
+ "\
Hello, World!
-"));
+",
+ ),
+ );
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
#[test]
fn release_with_build_script() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v").arg("--release"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("-v").arg("--release"),
+ execs().with_status(0),
+ );
}
#[test]
fn build_script_only() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("build.rs", r#"fn main() {}"#)
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
no targets specified in the manifest
- either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present"));
+ either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present",
+ ),
+ );
}
#[test]
fn shared_dep_with_a_build_script() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[build-dependencies.b]
path = "b"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("build.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/build.rs", "fn main() {}")
.file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.5.0"
[dependencies.a]
path = "../a"
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn transitive_dep_host() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[build-dependencies.b]
path = "b"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("build.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/build.rs", "fn main() {}")
.file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.5.0"
[dependencies.a]
path = "../a"
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn test_a_lib_with_a_build_command() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
include!(concat!(env!("OUT_DIR"), "/foo.rs"));
/// ```
pub fn bar() {
assert_eq!(foo(), 1);
}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::io::prelude::*;
use std::fs::File;
fn foo() -> i32 { 1 }
").unwrap();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
#[test]
fn test_dev_dep_build_script() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dev-dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/build.rs", "fn main() {}")
.file("a/src/lib.rs", "")
.build();
#[test]
fn build_script_with_dynamic_native_dependency() {
-
let _workspace = project("ws")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["builder", "foo"]
- "#)
+ "#,
+ )
.build();
let build = project("ws/builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
name = "builder"
crate-type = ["dylib"]
plugin = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[no_mangle]
pub extern fn foo() {}
- "#)
+ "#,
+ )
.build();
let foo = project("ws/foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.bar]
path = "bar"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
extern crate bar;
fn main() { bar::bar() }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("bar/build.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
use std::env;
use std::path::PathBuf;
println!("cargo:rustc-link-search=native={}/target/debug/deps",
src.display());
}
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {
#[cfg_attr(not(target_env = "msvc"), link(name = "builder"))]
#[cfg_attr(target_env = "msvc", link(name = "builder.dll"))]
extern { fn foo(); }
unsafe { foo() }
}
- "#)
+ "#,
+ )
.build();
- assert_that(build.cargo("build").arg("-v")
- .env("RUST_LOG", "cargo::ops::cargo_rustc"),
- execs().with_status(0));
+ assert_that(
+ build
+ .cargo("build")
+ .arg("-v")
+ .env("RUST_LOG", "cargo::ops::cargo_rustc"),
+ execs().with_status(0),
+ );
- assert_that(foo.cargo("build").arg("-v").env("SRC", build.root())
- .env("RUST_LOG", "cargo::ops::cargo_rustc"),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build")
+ .arg("-v")
+ .env("SRC", build.root())
+ .env("RUST_LOG", "cargo::ops::cargo_rustc"),
+ execs().with_status(0),
+ );
}
#[test]
fn profile_and_opt_level_set_correctly() {
let build = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(env::var("PROFILE").unwrap(), "release");
assert_eq!(env::var("DEBUG").unwrap(), "false");
}
- "#)
+ "#,
+ )
.build();
- assert_that(build.cargo("bench"),
- execs().with_status(0));
+ assert_that(build.cargo("bench"), execs().with_status(0));
}
#[test]
fn build_script_with_lto() {
let build = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
[profile.dev]
lto = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
}
- "#)
+ "#,
+ )
.build();
- assert_that(build.cargo("build"),
- execs().with_status(0));
+ assert_that(build.cargo("build"), execs().with_status(0));
}
#[test]
fn test_duplicate_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[build-dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::do_nothing() }
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
extern crate bar;
fn main() { bar::do_nothing() }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn do_nothing() {}")
.build();
#[test]
fn cfg_feedback() {
let build = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
#[cfg(foo)]
fn main() {}
- ")
- .file("build.rs", r#"
+ ",
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(build.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(build.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
links = "a"
build = "build.rs"
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
#[cfg(foo)]
fn main() {}
- ")
+ ",
+ )
.file("build.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.a]
rustc-cfg = ["foo"]
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn cfg_test() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=foo");
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
///
/// ```
/// extern crate foo;
fn test_foo() {
foo()
}
- "#)
- .file("tests/test.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
#[cfg(foo)]
#[test]
fn test_bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs()
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] [..] build.rs [..]
[RUNNING] `[..][/]build-script-build`
[RUNNING] `[..][/]foo-[..][EXE]`
[RUNNING] `[..][/]test-[..][EXE]`
[DOCTEST] foo
-[RUNNING] [..] --cfg foo[..]", dir = p.url()))
- .with_stdout_contains("test test_foo ... ok")
- .with_stdout_contains("test test_bar ... ok")
- .with_stdout_contains_n("test [..] ... ok", 3));
+[RUNNING] [..] --cfg foo[..]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3),
+ );
}
#[test]
fn cfg_doc() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=foo");
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(foo)]
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("bar/build.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=bar");
}
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(bar)]
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
assert_that(&p.root().join("target/doc"), existing_dir());
- assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file());
- assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file());
+ assert_that(
+ &p.root().join("target/doc/foo/fn.foo.html"),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("target/doc/bar/fn.bar.html"),
+ existing_file(),
+ );
}
#[test]
fn cfg_override_test() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
links = "a"
- "#)
+ "#,
+ )
.file("build.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.a]
rustc-cfg = ["foo"]
- "#, rustc_host()))
- .file("src/lib.rs", r#"
+ "#,
+ rustc_host()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
///
/// ```
/// extern crate foo;
fn test_foo() {
foo()
}
- "#)
- .file("tests/test.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
#[cfg(foo)]
#[test]
fn test_bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs()
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] `[..]`
[RUNNING] `[..]`
[RUNNING] `[..][/]foo-[..][EXE]`
[RUNNING] `[..][/]test-[..][EXE]`
[DOCTEST] foo
-[RUNNING] [..] --cfg foo[..]", dir = p.url()))
- .with_stdout_contains("test test_foo ... ok")
- .with_stdout_contains("test test_bar ... ok")
- .with_stdout_contains_n("test [..] ... ok", 3));
+[RUNNING] [..] --cfg foo[..]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test_bar ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3),
+ );
}
#[test]
fn cfg_override_doc() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file(".cargo/config", &format!(r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{target}.a]
rustc-cfg = ["foo"]
[target.{target}.b]
rustc-cfg = ["bar"]
- "#, target = rustc_host()))
+ "#,
+ target = rustc_host()
+ ),
+ )
.file("build.rs", "")
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(foo)]
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
build = "build.rs"
links = "b"
- "#)
+ "#,
+ )
.file("bar/build.rs", "")
- .file("bar/src/lib.rs", r#"
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(bar)]
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
assert_that(&p.root().join("target/doc"), existing_dir());
- assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file());
- assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file());
+ assert_that(
+ &p.root().join("target/doc/foo/fn.foo.html"),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("target/doc/bar/fn.bar.html"),
+ existing_file(),
+ );
}
#[test]
fn env_build() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
const FOO: &'static str = env!("FOO");
fn main() {
println!("{}", FOO);
}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-env=FOO=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
- assert_that(p.cargo("run").arg("-v"),
- execs().with_status(0).with_stdout("foo\n"));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("-v"),
+ execs().with_status(0).with_stdout("foo\n"),
+ );
}
#[test]
fn env_test() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-env=FOO=foo");
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub const FOO: &'static str = env!("FOO");
- "#)
- .file("tests/test.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
extern crate foo;
#[test]
fn test_foo() {
assert_eq!("foo", foo::FOO);
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs()
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] [..] build.rs [..]
[RUNNING] `[..][/]build-script-build`
[RUNNING] `[..][/]foo-[..][EXE]`
[RUNNING] `[..][/]test-[..][EXE]`
[DOCTEST] foo
-[RUNNING] [..] --crate-name foo[..]", dir = p.url()))
- .with_stdout_contains_n("running 0 tests", 2)
- .with_stdout_contains("test test_foo ... ok"));
+[RUNNING] [..] --crate-name foo[..]",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("running 0 tests", 2)
+ .with_stdout_contains("test test_foo ... ok"),
+ );
}
#[test]
fn env_doc() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
const FOO: &'static str = env!("FOO");
fn main() {}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-env=FOO=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("doc").arg("-v"), execs().with_status(0));
}
#[test]
fn flags_go_into_tests() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
b = { path = "b" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("tests/foo.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
[dependencies]
a = { path = "../a" }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", r#"
+ .file(
+ "a/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=test");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v").arg("--test=foo"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test").arg("-v").arg("--test=foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] a v0.5.0 ([..]
[RUNNING] `rustc [..] a[/]build.rs [..]`
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..] -L test[..]`
[RUNNING] `rustc [..] tests[/]foo.rs [..] -L test[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `[..][/]foo-[..][EXE]`")
- .with_stdout_contains("running 0 tests"));
-
- assert_that(p.cargo("test").arg("-v").arg("-pb").arg("--lib"),
- execs().with_status(0)
- .with_stderr("\
+[RUNNING] `[..][/]foo-[..][EXE]`",
+ )
+ .with_stdout_contains("running 0 tests"),
+ );
+
+ assert_that(
+ p.cargo("test").arg("-v").arg("-pb").arg("--lib"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[FRESH] a v0.5.0 ([..]
[COMPILING] b v0.5.0 ([..]
[RUNNING] `rustc [..] b[/]src[/]lib.rs [..] -L test[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `[..][/]b-[..][EXE]`")
- .with_stdout_contains("running 0 tests"));
+[RUNNING] `[..][/]b-[..][EXE]`",
+ )
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn diamond_passes_args_only_once() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = { path = "a" }
b = { path = "b" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("tests/foo.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
[dependencies]
b = { path = "../b" }
c = { path = "../c" }
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
[dependencies]
c = { path = "../c" }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("c/Cargo.toml", r#"
+ .file(
+ "c/Cargo.toml",
+ r#"
[project]
name = "c"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
- .file("c/build.rs", r#"
+ "#,
+ )
+ .file(
+ "c/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=native=test");
}
- "#)
+ "#,
+ )
.file("c/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] c v0.5.0 ([..]
[RUNNING] `rustc [..]`
[RUNNING] `[..]`
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `[..]rlib -L native=test`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn adding_an_override_invalidates() {
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file(".cargo/config", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=native=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `rustc [..]`
[RUNNING] `[..]`
[RUNNING] `rustc [..] -L native=foo`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- File::create(p.root().join(".cargo/config")).unwrap().write_all(format!("
+",
+ ),
+ );
+
+ File::create(p.root().join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ format!(
+ "
[target.{}.foo]
rustc-link-search = [\"native=bar\"]
- ", target).as_bytes()).unwrap();
-
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ ",
+ target
+ ).as_bytes(),
+ )
+ .unwrap();
+
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `rustc [..] -L native=bar`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn changing_an_override_invalidates() {
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!("
+ .file(
+ ".cargo/config",
+ &format!(
+ "
[target.{}.foo]
rustc-link-search = [\"native=foo\"]
- ", target))
+ ",
+ target
+ ),
+ )
.file("build.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `rustc [..] -L native=foo`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- File::create(p.root().join(".cargo/config")).unwrap().write_all(format!("
+",
+ ),
+ );
+
+ File::create(p.root().join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ format!(
+ "
[target.{}.foo]
rustc-link-search = [\"native=bar\"]
- ", target).as_bytes()).unwrap();
-
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ ",
+ target
+ ).as_bytes(),
+ )
+ .unwrap();
+
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `rustc [..] -L native=bar`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
-
#[test]
fn fresh_builds_possible_with_link_libs() {
// The bug is non-deterministic. Sometimes you can get a fresh build
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
links = "nativefoo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!("
+ .file(
+ ".cargo/config",
+ &format!(
+ "
[target.{}.nativefoo]
rustc-link-lib = [\"a\"]
rustc-link-search = [\"./b\"]
rustc-flags = \"-l z -L ./\"
- ", target))
+ ",
+ target
+ ),
+ )
.file("build.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build")
- .arg("-v")
- .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] foo v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
-
#[test]
fn fresh_builds_possible_with_multiple_metadata_overrides() {
// The bug is non-deterministic. Sometimes you can get a fresh build
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
links = "foo"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!("
+ .file(
+ ".cargo/config",
+ &format!(
+ "
[target.{}.foo]
a = \"\"
b = \"\"
c = \"\"
d = \"\"
e = \"\"
- ", target))
+ ",
+ target
+ ),
+ )
.file("build.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build")
- .arg("-v")
- .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] foo v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
-
#[test]
fn rebuild_only_on_explicit_paths() {
let p = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rerun-if-changed=foo");
println!("cargo:rerun-if-changed=bar");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
// files don't exist, so should always rerun if they don't exist
println!("run without");
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.5.0 ([..])
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
sleep_ms(1000);
File::create(p.root().join("foo")).unwrap();
// now the exist, so run once, catch the mtime, then shouldn't run again
println!("run with");
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.5.0 ([..])
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
println!("run with2");
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] a v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
sleep_ms(1000);
// random other files do not affect freshness
println!("run baz");
File::create(p.root().join("baz")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] a v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
// but changing dependent files does
println!("run foo change");
File::create(p.root().join("foo")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.5.0 ([..])
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
// .. as does deleting a file
println!("run foo delete");
fs::remove_file(p.root().join("bar")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.5.0 ([..])
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
-
#[test]
fn doctest_recieves_build_link_args() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
links = "bar"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", r#"
+ .file(
+ "a/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=native=bar");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustdoc --test [..] --crate-name foo [..]-L native=bar[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn please_respect_the_dag() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = { path = 'a' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=native=foo");
}
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
links = "bar"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", r#"
+ .file(
+ "a/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=native=bar");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustc [..] -L native=foo -L native=bar[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn non_utf8_output() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::io::prelude::*;
fn main() {
// now print more non-utf8
out.write_all(b"\xff\xff\n").unwrap();
}
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[cfg(foo)]
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn custom_target_dir() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
target-dir = 'test'
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/build.rs", "fn main() {}")
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn panic_abort_with_build_scripts() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = { path = "a" }
- "#)
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;")
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate a;",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
[build-dependencies]
b = { path = "../b" }
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/build.rs", "#[allow(unused_extern_crates)] extern crate b; fn main() {}")
- .file("b/Cargo.toml", r#"
+ .file(
+ "a/build.rs",
+ "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--release"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("-v").arg("--release"),
+ execs().with_status(0),
+ );
}
#[test]
fn warnings_emitted() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:warning=foo");
println!("cargo:warning=bar");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[RUNNING] `rustc [..]`
[RUNNING] `[..]`
warning: bar
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn warnings_hidden_for_upstream() {
Package::new("bar", "0.1.0")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:warning=foo");
println!("cargo:warning=bar");
}
- "#)
- .file("Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
build = "build.rs"
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0
[COMPILING] foo v0.5.0 ([..])
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn warnings_printed_on_vv() {
Package::new("bar", "0.1.0")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:warning=foo");
println!("cargo:warning=bar");
}
- "#)
- .file("Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
build = "build.rs"
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-vv"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-vv"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0
[COMPILING] foo v0.5.0 ([..])
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn output_shows_on_vv() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::io::prelude::*;
fn main() {
std::io::stderr().write_all(b"stderr\n").unwrap();
std::io::stdout().write_all(b"stdout\n").unwrap();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-vv"),
- execs().with_status(0)
- .with_stdout("\
+ assert_that(
+ p.cargo("build").arg("-vv"),
+ execs()
+ .with_status(0)
+ .with_stdout(
+ "\
stdout
-")
- .with_stderr("\
+",
+ )
+ .with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[RUNNING] `rustc [..]`
[RUNNING] `[..]`
stderr
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
links = "a.b"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-link-search=bar")
}
- "#)
- .file(".cargo/config", &format!(r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}.'a.b']
rustc-link-search = ["foo"]
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn rustc_and_rustdoc_set_correctly() {
let p = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(env::var("RUSTC").unwrap(), "rustc");
assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0));
+ assert_that(p.cargo("bench"), execs().with_status(0));
}
#[test]
fn cfg_env_vars_available() {
let p = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(fam, "windows");
}
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0));
+ assert_that(p.cargo("bench"), execs().with_status(0));
}
#[test]
fn switch_features_rerun() {
let p = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
[features]
foo = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
println!(include_str!(concat!(env!("OUT_DIR"), "/output")));
}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::fs::File;
use std::io::Write;
f.write_all(b"bar").unwrap();
}
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-v").arg("--features=foo"),
- execs().with_status(0).with_stdout("foo\n"));
- assert_that(p.cargo("run").arg("-v"),
- execs().with_status(0).with_stdout("bar\n"));
- assert_that(p.cargo("run").arg("-v").arg("--features=foo"),
- execs().with_status(0).with_stdout("foo\n"));
+ assert_that(
+ p.cargo("run").arg("-v").arg("--features=foo"),
+ execs().with_status(0).with_stdout("foo\n"),
+ );
+ assert_that(
+ p.cargo("run").arg("-v"),
+ execs().with_status(0).with_stdout("bar\n"),
+ );
+ assert_that(
+ p.cargo("run").arg("-v").arg("--features=foo"),
+ execs().with_status(0).with_stdout("foo\n"),
+ );
}
#[test]
fn assume_build_script_when_build_rs_present() {
let p = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
if ! cfg!(foo) {
panic!("the build script was not run");
}
}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("run").arg("-v"), execs().with_status(0));
}
#[test]
fn if_build_set_to_false_dont_treat_build_rs_as_build_script() {
let p = project("builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
build = false
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
if cfg!(foo) {
panic!("the build script was run");
}
}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=foo");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("run").arg("-v"), execs().with_status(0));
}
#[test]
// in the hopes it will have a much higher chance of triggering it.
Package::new("dep1", "0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
version = "0.1.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-L native=test1");
}
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
Package::new("dep2", "0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep2"
version = "0.1.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-L native=test2");
}
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
Package::new("dep3", "0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep3"
version = "0.1.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-L native=test3");
}
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
Package::new("dep4", "0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep4"
version = "0.1.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rustc-flags=-L native=test4");
}
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
dep2 = "*"
dep3 = "*"
dep4 = "*"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustc --crate-name foo [..] -L native=test1 -L native=test2 \
-L native=test3 -L native=test4`
-"));
+",
+ ),
+ );
}
#[test]
fn links_duplicates_with_cycle() {
// this tests that the links_duplicates are caught at resolver time
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dev-dependencies]
b = { path = "b" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("build.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
links = "a"
build = "build.rs"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.file("a/build.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
[dependencies]
foo = { path = ".." }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
#[test]
fn rename_with_link_search_path() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[lib]
crate-type = ["cdylib"]
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#[no_mangle]
pub extern fn cargo_test_foo() {}
- ");
+ ",
+ );
let p = p.build();
assert_that(p.cargo("build"), execs().with_status(0));
let p2 = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::fs;
use std::path::PathBuf;
println!("cargo:rustc-link-search={}",
dst.parent().unwrap().display());
}
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern {
#[link_name = "cargo_test_foo"]
fn foo();
fn main() {
unsafe { foo(); }
}
- "#);
+ "#,
+ );
let p2 = p2.build();
// Move the output `libfoo.so` into the directory of `p2`, and then delete
fs::copy(&src, &dst).unwrap();
// copy the import library for windows, if it exists
- drop(fs::copy(&root.join("foo.dll.lib"), p2.root().join("foo.dll.lib")));
+ drop(fs::copy(
+ &root.join("foo.dll.lib"),
+ p2.root().join("foo.dll.lib"),
+ ));
fs::remove_dir_all(p.root()).unwrap();
// Everything should work the first time
- assert_that(p2.cargo("run"),
- execs().with_status(0));
+ assert_that(p2.cargo("run"), execs().with_status(0));
// Now rename the root directory and rerun `cargo run`. Not only should we
// not build anything but we also shouldn't crash.
new.pop();
new.push("bar2");
fs::rename(p2.root(), &new).unwrap();
- assert_that(p2.cargo("run").cwd(&new),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p2.cargo("run").cwd(&new),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
[RUNNING] [..]
-"));
+",
+ ),
+ );
}
use std::fs::File;
use cargotest::sleep_ms;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn rerun_if_env_changes() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rerun-if-env-changed=FOO");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
- assert_that(p.cargo("build").env("FOO", "bar"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").env("FOO", "bar"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
- assert_that(p.cargo("build").env("FOO", "baz"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").env("FOO", "baz"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
- assert_that(p.cargo("build").env("FOO", "baz"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").env("FOO", "baz"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn rerun_if_env_or_file_changes() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
fn main() {
println!("cargo:rerun-if-env-changed=FOO");
println!("cargo:rerun-if-changed=foo");
}
- "#)
+ "#,
+ )
.file("foo", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
- assert_that(p.cargo("build").env("FOO", "bar"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").env("FOO", "bar"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
- assert_that(p.cargo("build").env("FOO", "bar"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").env("FOO", "bar"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
sleep_ms(1000);
File::create(p.root().join("foo")).unwrap();
- assert_that(p.cargo("build").env("FOO", "bar"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").env("FOO", "bar"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
-use cargotest::support::{project, execs, basic_bin_manifest};
-use hamcrest::{assert_that};
+use cargotest::support::{basic_bin_manifest, execs, project};
+use hamcrest::assert_that;
#[test]
fn alias_incorrect_config_type() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
- }"#)
- .file(".cargo/config",r#"
+ }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[alias]
b-cargo-test = 5
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("b-cargo-test").arg("-v"),
- execs().with_status(101).
- with_stderr_contains("[ERROR] invalid configuration \
+ assert_that(
+ p.cargo("b-cargo-test").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "[ERROR] invalid configuration \
for key `alias.b-cargo-test`
-expected a list, but found a integer for [..]"));
+expected a list, but found a integer for [..]",
+ ),
+ );
}
-
#[test]
fn alias_default_config_overrides_config() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
- }"#)
- .file(".cargo/config",r#"
+ }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[alias]
b = "not_build"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("b").arg("-v"),
- execs().with_status(0).
- with_stderr_contains("[COMPILING] foo v0.5.0 [..]"));
+ assert_that(
+ p.cargo("b").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[COMPILING] foo v0.5.0 [..]"),
+ );
}
#[test]
fn alias_config() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
- }"#)
- .file(".cargo/config",r#"
+ }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[alias]
b-cargo-test = "build"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("b-cargo-test").arg("-v"),
- execs().with_status(0).
- with_stderr_contains("[COMPILING] foo v0.5.0 [..]
-[RUNNING] `rustc --crate-name foo [..]"));
+ assert_that(
+ p.cargo("b-cargo-test").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "[COMPILING] foo v0.5.0 [..]
+[RUNNING] `rustc --crate-name foo [..]",
+ ),
+ );
}
#[test]
fn alias_list_test() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
- }"#)
- .file(".cargo/config",r#"
+ }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[alias]
b-cargo-test = ["build", "--release"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("b-cargo-test").arg("-v"),
- execs().with_status(0).
- with_stderr_contains("[COMPILING] foo v0.5.0 [..]").
- with_stderr_contains("[RUNNING] `rustc --crate-name [..]")
- );
+ assert_that(
+ p.cargo("b-cargo-test").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name [..]"),
+ );
}
#[test]
fn alias_with_flags_config() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
- }"#)
- .file(".cargo/config",r#"
+ }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[alias]
b-cargo-test = "build --release"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("b-cargo-test").arg("-v"),
- execs().with_status(0).
- with_stderr_contains("[COMPILING] foo v0.5.0 [..]").
- with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]")
- );
+ assert_that(
+ p.cargo("b-cargo-test").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]"),
+ );
}
#[test]
fn cant_shadow_builtin() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
- }"#)
- .file(".cargo/config",r#"
+ }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[alias]
build = "fetch"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
use cargo;
use cargotest::cargo_process;
use cargotest::support::paths::{self, CargoPathExt};
-use cargotest::support::{execs, project, Project, basic_bin_manifest};
+use cargotest::support::{basic_bin_manifest, execs, project, Project};
use hamcrest::{assert_that, existing_file};
-#[cfg_attr(windows,allow(dead_code))]
+#[cfg_attr(windows, allow(dead_code))]
enum FakeKind<'a> {
Executable,
- Symlink{target:&'a Path},
+ Symlink { target: &'a Path },
}
/// Add an empty file with executable flags (and platform-dependent suffix).
/// TODO: move this to `Project` if other cases using this emerge.
fn fake_file(proj: Project, dir: &Path, name: &str, kind: &FakeKind) -> Project {
- let path = proj.root().join(dir).join(&format!("{}{}", name,
- env::consts::EXE_SUFFIX));
+ let path = proj.root()
+ .join(dir)
+ .join(&format!("{}{}", name, env::consts::EXE_SUFFIX));
path.parent().unwrap().mkdir_p();
match *kind {
FakeKind::Executable => {
File::create(&path).unwrap();
make_executable(&path);
- },
- FakeKind::Symlink{target} => {
- make_symlink(&path,target);
+ }
+ FakeKind::Symlink { target } => {
+ make_symlink(&path, target);
}
}
return proj;
fn make_executable(_: &Path) {}
#[cfg(unix)]
fn make_symlink(p: &Path, t: &Path) {
- ::std::os::unix::fs::symlink(t,p).expect("Failed to create symlink");
+ ::std::os::unix::fs::symlink(t, p).expect("Failed to create symlink");
}
#[cfg(windows)]
fn make_symlink(_: &Path, _: &Path) {
#[test]
fn list_command_looks_at_path() {
let proj = project("list-non-overlapping").build();
- let proj = fake_file(proj, Path::new("path-test"), "cargo-1", &FakeKind::Executable);
+ let proj = fake_file(
+ proj,
+ Path::new("path-test"),
+ "cargo-1",
+ &FakeKind::Executable,
+ );
let mut pr = cargo_process();
let mut path = path();
path.push(proj.root().join("path-test"));
let path = env::join_paths(path.iter()).unwrap();
- let output = pr.arg("-v").arg("--list")
- .env("PATH", &path);
+ let output = pr.arg("-v").arg("--list").env("PATH", &path);
let output = output.exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
- assert!(output.contains("\n 1 "), "missing 1: {}", output);
+ assert!(
+ output.contains("\n 1 "),
+ "missing 1: {}",
+ output
+ );
}
// windows and symlinks don't currently agree that well
use cargotest::support::cargo_exe;
let proj = project("list-non-overlapping").build();
- let proj = fake_file(proj, Path::new("path-test"), "cargo-2",
- &FakeKind::Symlink{target:&cargo_exe()});
+ let proj = fake_file(
+ proj,
+ Path::new("path-test"),
+ "cargo-2",
+ &FakeKind::Symlink {
+ target: &cargo_exe(),
+ },
+ );
let mut pr = cargo_process();
let mut path = path();
path.push(proj.root().join("path-test"));
let path = env::join_paths(path.iter()).unwrap();
- let output = pr.arg("-v").arg("--list")
- .env("PATH", &path);
+ let output = pr.arg("-v").arg("--list").env("PATH", &path);
let output = output.exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
- assert!(output.contains("\n 2 "), "missing 2: {}", output);
+ assert!(
+ output.contains("\n 2 "),
+ "missing 2: {}",
+ output
+ );
}
#[test]
let mut pr = cargo_process();
pr.arg("biuld");
- assert_that(pr,
- execs().with_status(1)
- .with_stderr_contains("\
+ assert_that(
+ pr,
+ execs().with_status(1).with_stderr_contains(
+ "\
error: The subcommand 'biuld' wasn't recognized
<tab>Did you mean 'build'?
-"));
+",
+ ),
+ );
}
// if a subcommand is more than 3 edit distance away, we don't make a suggestion
fn find_closest_dont_correct_nonsense() {
let mut pr = cargo_process();
pr.arg("there-is-no-way-that-there-is-a-command-close-to-this")
- .cwd(&paths::root());
+ .cwd(&paths::root());
- assert_that(pr,
- execs().with_status(101)
- .with_stderr("[ERROR] no such subcommand: \
+ assert_that(
+ pr,
+ execs().with_status(101).with_stderr(
+ "[ERROR] no such subcommand: \
`there-is-no-way-that-there-is-a-command-close-to-this`
-"));
+",
+ ),
+ );
}
#[test]
let mut pr = cargo_process();
pr.arg("invalid-command");
- assert_that(pr,
- execs().with_status(101)
- .with_stderr("[ERROR] no such subcommand: `invalid-command`
-"));
+ assert_that(
+ pr,
+ execs().with_status(101).with_stderr(
+ "[ERROR] no such subcommand: `invalid-command`
+",
+ ),
+ );
}
#[test]
let root = paths::root();
let my_home = root.join("my_home");
fs::create_dir(&my_home).unwrap();
- File::create(&my_home.join("config")).unwrap().write_all(br#"
+ File::create(&my_home.join("config"))
+ .unwrap()
+ .write_all(
+ br#"
[cargo-new]
name = "foo"
email = "bar"
git = false
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
- assert_that(cargo_process()
- .arg("new").arg("foo")
- .env("USER", "foo")
- .env("CARGO_HOME", &my_home),
- execs().with_status(0));
+ assert_that(
+ cargo_process()
+ .arg("new")
+ .arg("foo")
+ .env("USER", "foo")
+ .env("CARGO_HOME", &my_home),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["foo <bar>"]"#));
}
fn cargo_subcommand_env() {
use cargotest::support::cargo_exe;
- let src = format!(r#"
+ let src = format!(
+ r#"
use std::env;
fn main() {{
println!("{{}}", env::var("{}").unwrap());
}}
- "#, cargo::CARGO_ENV);
+ "#,
+ cargo::CARGO_ENV
+ );
let p = project("cargo-envtest")
.file("Cargo.toml", &basic_bin_manifest("cargo-envtest"))
path.push(target_dir);
let path = env::join_paths(path.iter()).unwrap();
- assert_that(pr.arg("envtest").env("PATH", &path),
- execs().with_status(0).with_stdout(cargo.to_str().unwrap()));
+ assert_that(
+ pr.arg("envtest").env("PATH", &path),
+ execs().with_status(0).with_stdout(cargo.to_str().unwrap()),
+ );
}
#[test]
fn cargo_help() {
- assert_that(cargo_process(),
- execs().with_status(0));
- assert_that(cargo_process().arg("help"),
- execs().with_status(0));
- assert_that(cargo_process().arg("-h"),
- execs().with_status(0));
- assert_that(cargo_process().arg("help").arg("build"),
- execs().with_status(0));
- assert_that(cargo_process().arg("build").arg("-h"),
- execs().with_status(0));
- assert_that(cargo_process().arg("help").arg("help"),
- execs().with_status(0));
+ assert_that(cargo_process(), execs().with_status(0));
+ assert_that(cargo_process().arg("help"), execs().with_status(0));
+ assert_that(cargo_process().arg("-h"), execs().with_status(0));
+ assert_that(
+ cargo_process().arg("help").arg("build"),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process().arg("build").arg("-h"),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process().arg("help").arg("help"),
+ execs().with_status(0),
+ );
}
#[test]
fn explain() {
- assert_that(cargo_process().arg("--explain").arg("E0001"),
- execs().with_status(0).with_stdout_contains("\
-This error suggests that the expression arm corresponding to the noted pattern"));
+ assert_that(
+ cargo_process().arg("--explain").arg("E0001"),
+ execs().with_status(0).with_stdout_contains(
+ "\
+ This error suggests that the expression arm corresponding to the noted pattern",
+ ),
+ );
}
use cargotest::ChannelChanger;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn feature_required() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
im-a-teapot = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build")
- .masquerade_as_nightly_cargo(),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
feature `test-dummy-unstable` is required
consider adding `cargo-features = [\"test-dummy-unstable\"]` to the manifest
-"));
-
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
this Cargo does not support nightly features, but if you
switch to nightly channel you can add
`cargo-features = [\"test-dummy-unstable\"]` to enable this feature
-"));
+",
+ ),
+ );
}
#[test]
fn unknown_feature() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["foo"]
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
unknown cargo feature `foo`
-"));
+",
+ ),
+ );
}
#[test]
fn stable_feature_warns() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["test-dummy-stable"]
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: the cargo feature `test-dummy-stable` is now stable and is no longer \
necessary to be listed in the manifest
[COMPILING] a [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn nightly_feature_requires_nightly() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["test-dummy-unstable"]
[package]
version = "0.0.1"
authors = []
im-a-teapot = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build")
- .masquerade_as_nightly_cargo(),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a [..]
[FINISHED] [..]
-"));
-
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
but this is the `stable` channel
-"));
+",
+ ),
+ );
}
#[test]
fn nightly_feature_requires_nightly_in_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
cargo-features = ["test-dummy-unstable"]
[package]
version = "0.0.1"
authors = []
im-a-teapot = true
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build")
- .masquerade_as_nightly_cargo(),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a [..]
[COMPILING] b [..]
[FINISHED] [..]
-"));
-
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to load source for a dependency on `a`
Caused by:
Caused by:
the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
but this is the `stable` channel
-"));
+",
+ ),
+ );
}
#[test]
fn cant_publish() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["test-dummy-unstable"]
[package]
version = "0.0.1"
authors = []
im-a-teapot = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build")
- .masquerade_as_nightly_cargo(),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a [..]
[FINISHED] [..]
-"));
-
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
but this is the `stable` channel
-"));
+",
+ ),
+ );
}
#[test]
fn z_flags_rejected() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["test-dummy-unstable"]
[package]
version = "0.0.1"
authors = []
im-a-teapot = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build")
- .arg("-Zprint-im-a-teapot"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-Zprint-im-a-teapot"),
+ execs().with_status(101).with_stderr(
+ "\
error: the `-Z` flag is only accepted on the nightly channel of Cargo
-"));
-
- assert_that(p.cargo("build")
- .masquerade_as_nightly_cargo()
- .arg("-Zarg"),
- execs().with_status(101)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo().arg("-Zarg"),
+ execs().with_status(101).with_stderr(
+ "\
error: unknown `-Z` flag specified: arg
-"));
-
- assert_that(p.cargo("build")
- .masquerade_as_nightly_cargo()
- .arg("-Zprint-im-a-teapot"),
- execs().with_status(0)
- .with_stdout("im-a-teapot = true\n")
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zprint-im-a-teapot"),
+ execs()
+ .with_status(0)
+ .with_stdout("im-a-teapot = true\n")
+ .with_stderr(
+ "\
[COMPILING] a [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn publish_rejected() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["test-dummy-unstable"]
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("publish")
- .masquerade_as_nightly_cargo(),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("publish").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(
+ "\
error: cannot publish crates which activate nightly-only cargo features to crates.io
-"));
+",
+ ),
+ );
}
use std::fmt;
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
-use hamcrest::{Matcher, MatchResult, existing_file};
+use hamcrest::{existing_file, MatchResult, Matcher};
use cargotest::support::paths;
pub struct InstalledExe(pub &'static str);
pub fn exe(name: &str) -> String {
- if cfg!(windows) {format!("{}.exe", name)} else {name.to_string()}
+ if cfg!(windows) {
+ format!("{}.exe", name)
+ } else {
+ name.to_string()
+ }
}
impl<P: AsRef<Path>> Matcher<P> for InstalledExe {
}
pub fn is_nightly() -> bool {
- RUSTC.with(|r| {
- r.verbose_version.contains("-nightly") ||
- r.verbose_version.contains("-dev")
- })
+ RUSTC.with(|r| r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev"))
}
pub fn process<T: AsRef<OsStr>>(t: T) -> cargo::util::ProcessBuilder {
.env_remove("GIT_COMMITTER_NAME")
.env_remove("GIT_COMMITTER_EMAIL")
.env_remove("CARGO_TARGET_DIR") // we assume 'target'
- .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows
- return p
+ .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows
+ return p;
}
pub trait ChannelChanger: Sized {
use std::env;
use std::process::Command;
use std::sync::{Once, ONCE_INIT};
-use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};
+use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT};
-use cargotest::support::{project, main_file, basic_bin_manifest};
+use cargotest::support::{basic_bin_manifest, main_file, project};
pub fn disabled() -> bool {
// First, disable if ./configure requested so
// Right now the windows bots cannot cross compile due to the mingw setup,
// so we disable ourselves on all but macos/linux setups where the rustc
// install script ensures we have both architectures
- if !(cfg!(target_os = "macos") ||
- cfg!(target_os = "linux") ||
- cfg!(target_env = "msvc")) {
+ if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) {
return true;
}
.build();
let result = p.cargo("build")
- .arg("--target").arg(&cross_target)
+ .arg("--target")
+ .arg(&cross_target)
.exec_with_output();
if result.is_ok() {
let linux_help = if cfg!(target_os = "linux") {
"
-You may need to install runtime libraries for your Linux distribution as well.".to_string()
+You may need to install runtime libraries for your Linux distribution as well."
+ .to_string()
} else {
"".to_string()
};
let rustup_help = if rustup_available {
- format!("
+ format!(
+ "
Alternatively, you can install the necessary libraries for cross-compilation with
- rustup target add {}{}", cross_target, linux_help)
+ rustup target add {}{}",
+ cross_target, linux_help
+ )
} else {
"".to_string()
};
- panic!("Cannot cross compile to {}.
+ panic!(
+ "Cannot cross compile to {}.
This failure can be safely ignored. If you would prefer to not see this
failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".{}
-", cross_target, rustup_help);
+",
+ cross_target, rustup_help
+ );
}
pub fn alternate() -> String {
use git2;
use url::Url;
-use cargotest::support::{ProjectBuilder, Project, project, path2url};
+use cargotest::support::{project, Project, ProjectBuilder, path2url};
#[must_use]
pub struct RepoBuilder {
pub struct Repository(git2::Repository);
-pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) }
+pub fn repo(p: &Path) -> RepoBuilder {
+ RepoBuilder::init(p)
+}
impl RepoBuilder {
pub fn init(p: &Path) -> RepoBuilder {
t!(config.set_str("user.name", "name"));
t!(config.set_str("user.email", "email"));
}
- RepoBuilder { repo, files: Vec::new() }
+ RepoBuilder {
+ repo,
+ files: Vec::new(),
+ }
}
pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
let id = t!(index.write_tree());
let tree = t!(self.repo.find_tree(id));
let sig = t!(self.repo.signature());
- t!(self.repo.commit(Some("HEAD"), &sig, &sig,
- "Initial commit", &tree, &[]));
+ t!(self.repo
+ .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]));
}
- let RepoBuilder{ repo, .. } = self;
+ let RepoBuilder { repo, .. } = self;
Repository(repo)
}
}
}
pub fn new<F>(name: &str, callback: F) -> Result<Project, ProcessError>
- where F: FnOnce(ProjectBuilder) -> ProjectBuilder
+where
+ F: FnOnce(ProjectBuilder) -> ProjectBuilder,
{
let mut git_project = project(name);
git_project = callback(git_project);
t!(submodule.add_to_index(false));
}
let mut index = t!(repo.index());
- t!(index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT,
- Some(&mut (|a, _b| {
- if s.iter().any(|s| a.starts_with(s.path())) {1} else {0}
- }))));
+ t!(index.add_all(
+ ["*"].iter(),
+ git2::IndexAddOption::DEFAULT,
+ Some(
+ &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {
+ 1
+ } else {
+ 0
+ })
+ )
+ ));
t!(index.write());
}
-pub fn add_submodule<'a>(repo: &'a git2::Repository, url: &str,
- path: &Path) -> git2::Submodule<'a>
-{
+pub fn add_submodule<'a>(
+ repo: &'a git2::Repository,
+ url: &str,
+ path: &Path,
+) -> git2::Submodule<'a> {
let path = path.to_str().unwrap().replace(r"\", "/");
let mut s = t!(repo.submodule(url, Path::new(&path), false));
let subrepo = t!(s.open());
None => {}
}
let parents = parents.iter().collect::<Vec<_>>();
- t!(repo.commit(Some("HEAD"), &sig, &sig, "test",
- &t!(repo.find_tree(tree_id)),
- &parents))
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "test",
+ &t!(repo.find_tree(tree_id)),
+ &parents
+ ))
}
pub fn tag(repo: &git2::Repository, name: &str) {
let head = repo.head().unwrap().target().unwrap();
- t!(repo.tag(name,
- &t!(repo.find_object(head, None)),
- &t!(repo.signature()),
- "make a new tag",
- false));
+ t!(repo.tag(
+ name,
+ &t!(repo.find_object(head, None)),
+ &t!(repo.signature()),
+ "make a new tag",
+ false
+ ));
}
use url::Url;
use hamcrest as ham;
use cargo::util::ProcessBuilder;
-use cargo::util::{ProcessError};
+use cargo::util::ProcessError;
use cargotest::support::paths::CargoPathExt;
*
*/
-#[derive(PartialEq,Clone)]
+#[derive(PartialEq, Clone)]
struct FileBuilder {
path: PathBuf,
- body: String
+ body: String,
}
impl FileBuilder {
pub fn new(path: PathBuf, body: &str) -> FileBuilder {
- FileBuilder { path, body: body.to_string() }
+ FileBuilder {
+ path,
+ body: body.to_string(),
+ }
}
fn mk(&self) {
self.dirname().mkdir_p();
- let mut file = fs::File::create(&self.path).unwrap_or_else(|e| {
- panic!("could not create file {}: {}", self.path.display(), e)
- });
+ let mut file = fs::File::create(&self.path)
+ .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
t!(file.write_all(self.body.as_bytes()));
}
}
}
-#[derive(PartialEq,Clone)]
+#[derive(PartialEq, Clone)]
struct SymlinkBuilder {
dst: PathBuf,
src: PathBuf,
}
}
-#[derive(PartialEq,Clone)]
-pub struct Project{
+#[derive(PartialEq, Clone)]
+pub struct Project {
root: PathBuf,
}
#[must_use]
-#[derive(PartialEq,Clone)]
+#[derive(PartialEq, Clone)]
pub struct ProjectBuilder {
name: String,
root: Project,
pub fn new(name: &str, root: PathBuf) -> ProjectBuilder {
ProjectBuilder {
name: name.to_string(),
- root: Project{ root },
+ root: Project { root },
files: vec![],
symlinks: vec![],
}
}
- pub fn file<B: AsRef<Path>>(mut self, path: B,
- body: &str) -> Self {
+ pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
self._file(path.as_ref(), body);
self
}
fn _file(&mut self, path: &Path, body: &str) {
- self.files.push(FileBuilder::new(self.root.root.join(path), body));
+ self.files
+ .push(FileBuilder::new(self.root.root.join(path), body));
}
- pub fn symlink<T: AsRef<Path>>(mut self, dst: T,
- src: T) -> Self {
- self.symlinks.push(SymlinkBuilder::new(self.root.root.join(dst),
- self.root.root.join(src)));
+ pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+ self.symlinks.push(SymlinkBuilder::new(
+ self.root.root.join(dst),
+ self.root.root.join(src),
+ ));
self
}
symlink.mk();
}
- let ProjectBuilder{ name: _, root, files: _, symlinks: _, .. } = self;
+ let ProjectBuilder {
+ name: _,
+ root,
+ files: _,
+ symlinks: _,
+ ..
+ } = self;
root
}
self.build_dir().join("debug")
}
- pub fn url(&self) -> Url { path2url(self.root()) }
+ pub fn url(&self) -> Url {
+ path2url(self.root())
+ }
pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf {
let prefix = Project::get_lib_prefix(kind);
let extension = Project::get_lib_extension(kind);
- let lib_file_name = format!("{}{}.{}",
- prefix,
- name,
- extension);
+ let lib_file_name = format!("{}{}.{}", prefix, name, extension);
self.target_debug_dir()
.join("examples")
}
pub fn bin(&self, b: &str) -> PathBuf {
- self.build_dir().join("debug").join(&format!("{}{}", b,
- env::consts::EXE_SUFFIX))
+ self.build_dir()
+ .join("debug")
+ .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
}
pub fn release_bin(&self, b: &str) -> PathBuf {
- self.build_dir().join("release").join(&format!("{}{}", b,
- env::consts::EXE_SUFFIX))
+ self.build_dir()
+ .join("release")
+ .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
}
pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
- self.build_dir().join(target).join("debug")
- .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+ self.build_dir().join(target).join("debug").join(&format!(
+ "{}{}",
+ b,
+ env::consts::EXE_SUFFIX
+ ))
}
pub fn change_file(&self, path: &str, body: &str) {
pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder {
let mut p = ::cargotest::process(program);
p.cwd(self.root());
- return p
+ return p;
}
pub fn cargo(&self, cmd: &str) -> ProcessBuilder {
pub fn read_lockfile(&self) -> String {
let mut buffer = String::new();
- fs::File::open(self.root().join("Cargo.lock")).unwrap()
- .read_to_string(&mut buffer).unwrap();
+ fs::File::open(self.root().join("Cargo.lock"))
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
buffer
}
"lib"
}
}
- _ => unreachable!()
+ _ => unreachable!(),
}
}
"dylib" | "proc-macro" => {
if cfg!(windows) {
"dll"
- } else if cfg!(target_os="macos") {
+ } else if cfg!(target_os = "macos") {
"dylib"
} else {
"so"
}
}
- _ => unreachable!()
+ _ => unreachable!(),
}
}
}
fn with_err_msg(self, val: String) -> Result<T, String> {
match self {
Ok(val) => Ok(val),
- Err(err) => Err(format!("{}; original={}", val, err))
+ Err(err) => Err(format!("{}; original={}", val, err)),
}
}
}
// Path to cargo executables
pub fn cargo_dir() -> PathBuf {
- env::var_os("CARGO_BIN_PATH").map(PathBuf::from).or_else(|| {
- env::current_exe().ok().map(|mut path| {
- path.pop();
- if path.ends_with("deps") {
+ env::var_os("CARGO_BIN_PATH")
+ .map(PathBuf::from)
+ .or_else(|| {
+ env::current_exe().ok().map(|mut path| {
path.pop();
- }
- path
+ if path.ends_with("deps") {
+ path.pop();
+ }
+ path
+ })
})
- }).unwrap_or_else(|| {
- panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test")
- })
+ .unwrap_or_else(|| panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test"))
}
pub fn cargo_exe() -> PathBuf {
}
pub fn with_stdout_contains_n<S: ToString>(mut self, expected: S, number: usize) -> Execs {
- self.expect_stdout_contains_n.push((expected.to_string(), number));
+ self.expect_stdout_contains_n
+ .push((expected.to_string(), number));
self
}
}
pub fn with_json(mut self, expected: &str) -> Execs {
- self.expect_json = Some(expected.split("\n\n").map(|obj| {
- obj.parse().unwrap()
- }).collect());
+ self.expect_json = Some(
+ expected
+ .split("\n\n")
+ .map(|obj| obj.parse().unwrap())
+ .collect(),
+ );
self
}
match self.expect_exit_code {
None => Ok(()),
Some(code) if actual.status.code() == Some(code) => Ok(()),
- Some(_) => {
- Err(format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}",
- actual.status,
- String::from_utf8_lossy(&actual.stdout),
- String::from_utf8_lossy(&actual.stderr)))
- }
+ Some(_) => Err(format!(
+ "exited with {}\n--- stdout\n{}\n--- stderr\n{}",
+ actual.status,
+ String::from_utf8_lossy(&actual.stdout),
+ String::from_utf8_lossy(&actual.stderr)
+ )),
}
}
fn match_stdout(&self, actual: &Output) -> ham::MatchResult {
- self.match_std(self.expect_stdout.as_ref(), &actual.stdout,
- "stdout", &actual.stderr, MatchKind::Exact)?;
+ self.match_std(
+ self.expect_stdout.as_ref(),
+ &actual.stdout,
+ "stdout",
+ &actual.stderr,
+ MatchKind::Exact,
+ )?;
for expect in self.expect_stdout_contains.iter() {
- self.match_std(Some(expect), &actual.stdout, "stdout",
- &actual.stderr, MatchKind::Partial)?;
+ self.match_std(
+ Some(expect),
+ &actual.stdout,
+ "stdout",
+ &actual.stderr,
+ MatchKind::Partial,
+ )?;
}
for expect in self.expect_stderr_contains.iter() {
- self.match_std(Some(expect), &actual.stderr, "stderr",
- &actual.stdout, MatchKind::Partial)?;
+ self.match_std(
+ Some(expect),
+ &actual.stderr,
+ "stderr",
+ &actual.stdout,
+ MatchKind::Partial,
+ )?;
}
for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
- self.match_std(Some(&expect), &actual.stdout, "stdout",
- &actual.stderr, MatchKind::PartialN(number))?;
+ self.match_std(
+ Some(&expect),
+ &actual.stdout,
+ "stdout",
+ &actual.stderr,
+ MatchKind::PartialN(number),
+ )?;
}
for expect in self.expect_stdout_not_contains.iter() {
- self.match_std(Some(expect), &actual.stdout, "stdout",
- &actual.stderr, MatchKind::NotPresent)?;
+ self.match_std(
+ Some(expect),
+ &actual.stdout,
+ "stdout",
+ &actual.stderr,
+ MatchKind::NotPresent,
+ )?;
}
for expect in self.expect_stderr_not_contains.iter() {
- self.match_std(Some(expect), &actual.stderr, "stderr",
- &actual.stdout, MatchKind::NotPresent)?;
+ self.match_std(
+ Some(expect),
+ &actual.stderr,
+ "stderr",
+ &actual.stdout,
+ MatchKind::NotPresent,
+ )?;
}
for expect in self.expect_neither_contains.iter() {
- self.match_std(Some(expect), &actual.stdout, "stdout",
- &actual.stdout, MatchKind::NotPresent)?;
-
- self.match_std(Some(expect), &actual.stderr, "stderr",
- &actual.stderr, MatchKind::NotPresent)?;
+ self.match_std(
+ Some(expect),
+ &actual.stdout,
+ "stdout",
+ &actual.stdout,
+ MatchKind::NotPresent,
+ )?;
+
+ self.match_std(
+ Some(expect),
+ &actual.stderr,
+ "stderr",
+ &actual.stderr,
+ MatchKind::NotPresent,
+ )?;
}
for expect in self.expect_either_contains.iter() {
- let match_std = self.match_std(Some(expect), &actual.stdout, "stdout",
- &actual.stdout, MatchKind::Partial);
- let match_err = self.match_std(Some(expect), &actual.stderr, "stderr",
- &actual.stderr, MatchKind::Partial);
+ let match_std = self.match_std(
+ Some(expect),
+ &actual.stdout,
+ "stdout",
+ &actual.stdout,
+ MatchKind::Partial,
+ );
+ let match_err = self.match_std(
+ Some(expect),
+ &actual.stderr,
+ "stderr",
+ &actual.stderr,
+ MatchKind::Partial,
+ );
if let (Err(_), Err(_)) = (match_std, match_err) {
- Err(format!("expected to find:\n\
- {}\n\n\
- did not find in either output.", expect))?;
+ Err(format!(
+ "expected to find:\n\
+ {}\n\n\
+ did not find in either output.",
+ expect
+ ))?;
}
}
.map_err(|_| "stdout was not utf8 encoded".to_owned())?;
let lines = stdout.lines().collect::<Vec<_>>();
if lines.len() != objects.len() {
- return Err(format!("expected {} json lines, got {}, stdout:\n{}",
- objects.len(), lines.len(), stdout));
+ return Err(format!(
+ "expected {} json lines, got {}, stdout:\n{}",
+ objects.len(),
+ lines.len(),
+ stdout
+ ));
}
for (obj, line) in objects.iter().zip(lines) {
self.match_json(obj, line)?;
}
fn match_stderr(&self, actual: &Output) -> ham::MatchResult {
- self.match_std(self.expect_stderr.as_ref(), &actual.stderr,
- "stderr", &actual.stdout, MatchKind::Exact)
- }
-
- fn match_std(&self, expected: Option<&String>, actual: &[u8],
- description: &str, extra: &[u8],
- kind: MatchKind) -> ham::MatchResult {
+ self.match_std(
+ self.expect_stderr.as_ref(),
+ &actual.stderr,
+ "stderr",
+ &actual.stdout,
+ MatchKind::Exact,
+ )
+ }
+
+ fn match_std(
+ &self,
+ expected: Option<&String>,
+ actual: &[u8],
+ description: &str,
+ extra: &[u8],
+ kind: MatchKind,
+ ) -> ham::MatchResult {
let out = match expected {
Some(out) => out,
None => return Ok(()),
};
let actual = match str::from_utf8(actual) {
- Err(..) => return Err(format!("{} was not utf8 encoded",
- description)),
+ Err(..) => return Err(format!("{} was not utf8 encoded", description)),
Ok(actual) => actual,
};
// Let's not deal with \r\n vs \n on windows...
if diffs.is_empty() {
Ok(())
} else {
- Err(format!("differences:\n\
- {}\n\n\
- other output:\n\
- `{}`", diffs.join("\n"),
- String::from_utf8_lossy(extra)))
+ Err(format!(
+ "differences:\n\
+ {}\n\n\
+ other output:\n\
+ `{}`",
+ diffs.join("\n"),
+ String::from_utf8_lossy(extra)
+ ))
}
}
MatchKind::Partial => {
if diffs.is_empty() {
Ok(())
} else {
- Err(format!("expected to find:\n\
- {}\n\n\
- did not find in output:\n\
- {}", out,
- actual))
+ Err(format!(
+ "expected to find:\n\
+ {}\n\n\
+ did not find in output:\n\
+ {}",
+ out, actual
+ ))
}
}
MatchKind::PartialN(number) => {
if matches == number {
Ok(())
} else {
- Err(format!("expected to find {} occurrences:\n\
- {}\n\n\
- did not find in output:\n\
- {}", number, out,
- actual))
+ Err(format!(
+ "expected to find {} occurrences:\n\
+ {}\n\n\
+ did not find in output:\n\
+ {}",
+ number, out, actual
+ ))
}
}
MatchKind::NotPresent => {
if !actual.contains(out) {
Ok(())
} else {
- Err(format!("expected not to find:\n\
- {}\n\n\
- but found in output:\n\
- {}", out,
- actual))
+ Err(format!(
+ "expected not to find:\n\
+ {}\n\n\
+ but found in output:\n\
+ {}",
+ out, actual
+ ))
}
}
}
fn match_json(&self, expected: &Value, line: &str) -> ham::MatchResult {
let actual = match line.parse() {
- Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)),
- Ok(actual) => actual,
+ Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)),
+ Ok(actual) => actual,
};
match find_mismatch(expected, &actual) {
}
}
- fn diff_lines<'a>(&self, actual: str::Lines<'a>, expected: str::Lines<'a>,
- partial: bool) -> Vec<String> {
+ fn diff_lines<'a>(
+ &self,
+ actual: str::Lines<'a>,
+ expected: str::Lines<'a>,
+ partial: bool,
+ ) -> Vec<String> {
let actual = actual.take(if partial {
expected.clone().count()
} else {
usize::MAX
});
- zip_all(actual, expected).enumerate().filter_map(|(i, (a,e))| {
- match (a, e) {
+ zip_all(actual, expected)
+ .enumerate()
+ .filter_map(|(i, (a, e))| match (a, e) {
(Some(a), Some(e)) => {
if lines_match(&e, &a) {
None
} else {
Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a))
}
- },
- (Some(a), None) => {
- Some(format!("{:3} -\n + |{}|\n", i, a))
- },
- (None, Some(e)) => {
- Some(format!("{:3} - |{}|\n +\n", i, e))
- },
- (None, None) => panic!("Cannot get here")
- }
- }).collect()
+ }
+ (Some(a), None) => Some(format!("{:3} -\n + |{}|\n", i, a)),
+ (None, Some(e)) => Some(format!("{:3} - |{}|\n +\n", i, e)),
+ (None, None) => panic!("Cannot get here"),
+ })
+ .collect()
}
}
match actual.find(part) {
Some(j) => {
if i == 0 && j != 0 {
- return false
+ return false;
}
actual = &actual[j + part.len()..];
}
- None => {
- return false
- }
+ None => return false,
}
}
actual.is_empty() || expected.ends_with("[..]")
// as paths). You can use a `"{...}"` string literal as a wildcard for
// arbitrary nested JSON (useful for parts of object emitted by other programs
// (e.g. rustc) rather than Cargo itself). Arrays are sorted before comparison.
-fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value)
- -> Option<(&'a Value, &'a Value)> {
+fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
use serde_json::Value::*;
match (expected, actual) {
(&Number(ref l), &Number(ref r)) if l == r => None,
let mut l = l.iter().collect::<Vec<_>>();
let mut r = r.iter().collect::<Vec<_>>();
- l.retain(|l| {
- match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+ l.retain(
+ |l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
Some(i) => {
r.remove(i);
false
}
- None => true
- }
- });
+ None => true,
+ },
+ );
if l.len() > 0 {
assert!(r.len() > 0);
return Some((expected, actual));
}
- l.values().zip(r.values())
- .filter_map(|(l, r)| find_mismatch(l, r))
- .nth(0)
+ l.values()
+ .zip(r.values())
+ .filter_map(|(l, r)| find_mismatch(l, r))
+ .nth(0)
}
(&Null, &Null) => None,
// magic string literal "{...}" acts as wildcard for any sub-JSON
(&String(ref l), _) if l == "{...}" => None,
_ => Some((expected, actual)),
}
-
}
struct ZipAll<I1: Iterator, I2: Iterator> {
second: I2,
}
-impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> {
+impl<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>> Iterator for ZipAll<I1, I2> {
type Item = (Option<T>, Option<T>);
fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
let first = self.first.next();
match (first, second) {
(None, None) => None,
- (a, b) => Some((a, b))
+ (a, b) => Some((a, b)),
}
}
}
-fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+fn zip_all<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
ZipAll {
first: a,
second: b,
Ok(out) => self.match_output(&out),
Err(e) => {
let err = e.downcast_ref::<ProcessError>();
- if let Some(&ProcessError { output: Some(ref out), .. }) = err {
- return self.match_output(out)
+ if let Some(&ProcessError {
+ output: Some(ref out),
+ ..
+ }) = err
+ {
+ return self.match_output(out);
}
let mut s = format!("could not exec process {}: {}", process, e);
for cause in e.causes() {
}
pub fn basic_bin_manifest(name: &str) -> String {
- format!(r#"
+ format!(
+ r#"
[package]
name = "{}"
[[bin]]
name = "{}"
- "#, name, name)
+ "#,
+ name, name
+ )
}
pub fn basic_lib_manifest(name: &str) -> String {
- format!(r#"
+ format!(
+ r#"
[package]
name = "{}"
[lib]
name = "{}"
- "#, name, name)
+ "#,
+ name, name
+ )
}
pub fn path2url(p: PathBuf) -> Url {
fn substitute_macros(input: &str) -> String {
let macros = [
- ("[RUNNING]", " Running"),
- ("[COMPILING]", " Compiling"),
- ("[CREATED]", " Created"),
- ("[FINISHED]", " Finished"),
- ("[ERROR]", "error:"),
- ("[WARNING]", "warning:"),
+ ("[RUNNING]", " Running"),
+ ("[COMPILING]", " Compiling"),
+ ("[CREATED]", " Created"),
+ ("[FINISHED]", " Finished"),
+ ("[ERROR]", "error:"),
+ ("[WARNING]", "warning:"),
("[DOCUMENTING]", " Documenting"),
- ("[FRESH]", " Fresh"),
- ("[UPDATING]", " Updating"),
- ("[ADDING]", " Adding"),
- ("[REMOVING]", " Removing"),
- ("[DOCTEST]", " Doc-tests"),
- ("[PACKAGING]", " Packaging"),
+ ("[FRESH]", " Fresh"),
+ ("[UPDATING]", " Updating"),
+ ("[ADDING]", " Adding"),
+ ("[REMOVING]", " Removing"),
+ ("[DOCTEST]", " Doc-tests"),
+ ("[PACKAGING]", " Packaging"),
("[DOWNLOADING]", " Downloading"),
- ("[UPLOADING]", " Uploading"),
- ("[VERIFYING]", " Verifying"),
- ("[ARCHIVING]", " Archiving"),
- ("[INSTALLING]", " Installing"),
- ("[REPLACING]", " Replacing"),
- ("[UNPACKING]", " Unpacking"),
- ("[SUMMARY]", " Summary"),
- ("[EXE]", if cfg!(windows) {".exe"} else {""}),
- ("[/]", if cfg!(windows) {"\\"} else {"/"}),
+ ("[UPLOADING]", " Uploading"),
+ ("[VERIFYING]", " Verifying"),
+ ("[ARCHIVING]", " Archiving"),
+ ("[INSTALLING]", " Installing"),
+ ("[REPLACING]", " Replacing"),
+ ("[UNPACKING]", " Unpacking"),
+ ("[SUMMARY]", " Summary"),
+ ("[EXE]", if cfg!(windows) { ".exe" } else { "" }),
+ ("[/]", if cfg!(windows) { "\\" } else { "/" }),
];
let mut result = input.to_owned();
for &(pat, subst) in macros.iter() {
use std::io::{self, ErrorKind};
use std::path::{Path, PathBuf};
use std::sync::{Once, ONCE_INIT};
-use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
+use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use filetime::{self, FileTime};
-static CARGO_INTEGRATION_TEST_DIR : &'static str = "cit";
+static CARGO_INTEGRATION_TEST_DIR: &'static str = "cit";
static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT;
thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst));
});
LOCAL_INIT.with(|i| {
if i.get() {
- return
+ return;
}
i.set(true);
root().rm_rf();
}
fn move_in_time<F>(&self, travel_amount: F)
- where F: Fn(u64, u32) -> (u64, u32);
+ where
+ F: Fn(u64, u32) -> (u64, u32);
}
impl CargoPathExt for Path {
*/
fn rm_rf(&self) {
if !self.exists() {
- return
+ return;
}
for file in t!(fs::read_dir(self)) {
}
fn mkdir_p(&self) {
- fs::create_dir_all(self).unwrap_or_else(|e| {
- panic!("failed to mkdir_p {}: {}", self.display(), e)
- })
+ fs::create_dir_all(self)
+ .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
}
fn move_in_time<F>(&self, travel_amount: F)
- where F: Fn(u64, u32) -> ((u64, u32)),
+ where
+ F: Fn(u64, u32) -> ((u64, u32)),
{
if self.is_file() {
time_travel(self, &travel_amount);
}
fn recurse<F>(p: &Path, bad: &Path, travel_amount: &F)
- where F: Fn(u64, u32) -> ((u64, u32)),
+ where
+ F: Fn(u64, u32) -> ((u64, u32)),
{
if p.is_file() {
time_travel(p, travel_amount)
}
fn time_travel<F>(path: &Path, travel_amount: &F)
- where F: Fn(u64, u32) -> ((u64, u32)),
+ where
+ F: Fn(u64, u32) -> ((u64, u32)),
{
let stat = t!(path.metadata());
// Sadly change_file_times has a failure mode where a readonly file
// cannot have its times changed on windows.
- do_op(path, "set file times",
- |path| filetime::set_file_times(path, newtime, newtime));
+ do_op(path, "set file times", |path| {
+ filetime::set_file_times(path, newtime, newtime)
+ });
}
}
}
fn do_op<F>(path: &Path, desc: &str, mut f: F)
- where F: FnMut(&Path) -> io::Result<()>
+where
+ F: FnMut(&Path) -> io::Result<()>,
{
match f(path) {
Ok(()) => {}
- Err(ref e) if cfg!(windows) &&
- e.kind() == ErrorKind::PermissionDenied => {
+ Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => {
let mut p = t!(path.metadata()).permissions();
p.set_readonly(false);
t!(fs::set_permissions(path, p));
pub fn setup() -> Repository {
let config = paths::root().join(".cargo/config");
t!(fs::create_dir_all(config.parent().unwrap()));
- t!(t!(File::create(&config)).write_all(format!(r#"
+ t!(t!(File::create(&config)).write_all(
+ format!(
+ r#"
[registry]
token = "api-token"
[registries.alternative]
index = "{registry}"
- "#, registry = registry().to_string()).as_bytes()));
+ "#,
+ registry = registry().to_string()
+ ).as_bytes()
+ ));
let credentials = paths::root().join("home/.cargo/credentials");
t!(fs::create_dir_all(credentials.parent().unwrap()));
- t!(t!(File::create(&credentials)).write_all(br#"
+ t!(t!(File::create(&credentials)).write_all(
+ br#"
[registries.alternative]
token = "api-token"
- "#));
+ "#
+ ));
t!(fs::create_dir_all(&upload_path().join("api/v1/crates")));
repo(®istry_path())
- .file("config.json", &format!(r#"{{
+ .file(
+ "config.json",
+ &format!(
+ r#"{{
"dl": "{0}",
"api": "{0}"
- }}"#, upload()))
+ }}"#,
+ upload()
+ ),
+ )
.build()
}
-fn registry_path() -> PathBuf { paths::root().join("registry") }
-pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
-pub fn upload_path() -> PathBuf { paths::root().join("upload") }
-fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() }
+fn registry_path() -> PathBuf {
+ paths::root().join("registry")
+}
+pub fn registry() -> Url {
+ Url::from_file_path(&*registry_path()).ok().unwrap()
+}
+pub fn upload_path() -> PathBuf {
+ paths::root().join("upload")
+}
+fn upload() -> Url {
+ Url::from_file_path(&*upload_path()).ok().unwrap()
+}
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
-use std::path::{PathBuf, Path};
+use std::path::{Path, PathBuf};
use cargo::util::Sha256;
use flate2::Compression;
use cargotest::support::paths;
use cargotest::support::git::repo;
-pub fn registry_path() -> PathBuf { paths::root().join("registry") }
-pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
-pub fn dl_path() -> PathBuf { paths::root().join("dl") }
-pub fn dl_url() -> Url { Url::from_file_path(&*dl_path()).ok().unwrap() }
-pub fn alt_registry_path() -> PathBuf { paths::root().join("alternative-registry") }
-pub fn alt_registry() -> Url { Url::from_file_path(&*alt_registry_path()).ok().unwrap() }
-pub fn alt_dl_path() -> PathBuf { paths::root().join("alt_dl") }
+pub fn registry_path() -> PathBuf {
+ paths::root().join("registry")
+}
+pub fn registry() -> Url {
+ Url::from_file_path(&*registry_path()).ok().unwrap()
+}
+pub fn dl_path() -> PathBuf {
+ paths::root().join("dl")
+}
+pub fn dl_url() -> Url {
+ Url::from_file_path(&*dl_path()).ok().unwrap()
+}
+pub fn alt_registry_path() -> PathBuf {
+ paths::root().join("alternative-registry")
+}
+pub fn alt_registry() -> Url {
+ Url::from_file_path(&*alt_registry_path()).ok().unwrap()
+}
+pub fn alt_dl_path() -> PathBuf {
+ paths::root().join("alt_dl")
+}
pub fn alt_dl_url() -> String {
let base = Url::from_file_path(&*alt_dl_path()).ok().unwrap();
format!("{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate", base)
}
-pub fn alt_api_path() -> PathBuf { paths::root().join("alt_api") }
-pub fn alt_api_url() -> Url { Url::from_file_path(&*alt_api_path()).ok().unwrap() }
+pub fn alt_api_path() -> PathBuf {
+ paths::root().join("alt_api")
+}
+pub fn alt_api_url() -> Url {
+ Url::from_file_path(&*alt_api_path()).ok().unwrap()
+}
pub struct Package {
name: String,
let config = paths::home().join(".cargo/config");
t!(fs::create_dir_all(config.parent().unwrap()));
if fs::metadata(&config).is_ok() {
- return
+ return;
}
- t!(t!(File::create(&config)).write_all(format!(r#"
+ t!(t!(File::create(&config)).write_all(
+ format!(
+ r#"
[registry]
token = "api-token"
[registries.alternative]
index = '{alt}'
- "#, reg = registry(), alt = alt_registry()).as_bytes()));
+ "#,
+ reg = registry(),
+ alt = alt_registry()
+ ).as_bytes()
+ ));
// Init a new registry
let _ = repo(®istry_path())
- .file("config.json", &format!(r#"
+ .file(
+ "config.json",
+ &format!(
+ r#"
{{"dl":"{0}","api":"{0}"}}
- "#, dl_url()))
+ "#,
+ dl_url()
+ ),
+ )
.build();
fs::create_dir_all(dl_path().join("api/v1/crates")).unwrap();
// Init an alt registry
repo(&alt_registry_path())
- .file("config.json", &format!(r#"
+ .file(
+ "config.json",
+ &format!(
+ r#"
{{"dl":"{}","api":"{}"}}
- "#, alt_dl_url(), alt_api_url()))
+ "#,
+ alt_dl_url(),
+ alt_api_url()
+ ),
+ )
.build();
fs::create_dir_all(alt_api_path().join("api/v1/crates")).unwrap();
}
}
pub fn extra_file(&mut self, name: &str, contents: &str) -> &mut Package {
- self.extra_files.push((name.to_string(), contents.to_string()));
+ self.extra_files
+ .push((name.to_string(), contents.to_string()));
self
}
self.full_dep(name, vers, None, "normal", &[], None)
}
- pub fn feature_dep(&mut self,
- name: &str,
- vers: &str,
- features: &[&str]) -> &mut Package {
+ pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
self.full_dep(name, vers, None, "normal", features, None)
}
- pub fn target_dep(&mut self,
- name: &str,
- vers: &str,
- target: &str) -> &mut Package {
+ pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
self.full_dep(name, vers, Some(target), "normal", &[], None)
}
- pub fn registry_dep(&mut self,
- name: &str,
- vers: &str,
- registry: &str) -> &mut Package {
+ pub fn registry_dep(&mut self, name: &str, vers: &str, registry: &str) -> &mut Package {
self.full_dep(name, vers, None, "normal", &[], Some(registry))
}
self.full_dep(name, vers, None, "dev", &[], None)
}
- fn full_dep(&mut self,
- name: &str,
- vers: &str,
- target: Option<&str>,
- kind: &str,
- features: &[&str],
- registry: Option<&str>) -> &mut Package {
+ fn full_dep(
+ &mut self,
+ name: &str,
+ vers: &str,
+ target: Option<&str>,
+ kind: &str,
+ features: &[&str],
+ registry: Option<&str>,
+ ) -> &mut Package {
self.deps.push(Dependency {
name: name.to_string(),
vers: vers.to_string(),
self.make_archive();
// Figure out what we're going to write into the index
- let deps = self.deps.iter().map(|dep| {
- json!({
+ let deps = self.deps
+ .iter()
+ .map(|dep| {
+ json!({
"name": dep.name,
"req": dep.vers,
"features": dep.features,
"kind": dep.kind,
"registry": dep.registry,
})
- }).collect::<Vec<_>>();
+ })
+ .collect::<Vec<_>>();
let cksum = {
let mut c = Vec::new();
t!(t!(File::open(&self.archive_dst())).read_to_end(&mut c));
_ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name),
};
- let registry_path = if self.alternative { alt_registry_path() } else { registry_path() };
+ let registry_path = if self.alternative {
+ alt_registry_path()
+ } else {
+ registry_path()
+ };
// Write file/line in the index
let dst = if self.local {
let mut prev = String::new();
let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev));
t!(fs::create_dir_all(dst.parent().unwrap()));
- t!(t!(File::create(&dst))
- .write_all((prev + &line[..] + "\n").as_bytes()));
+ t!(t!(File::create(&dst)).write_all((prev + &line[..] + "\n").as_bytes()));
// Add the new file to the index
if !self.local {
let sig = t!(repo.signature());
let parent = t!(repo.refname_to_id("refs/heads/master"));
let parent = t!(repo.find_commit(parent));
- t!(repo.commit(Some("HEAD"), &sig, &sig,
- "Another commit", &tree,
- &[&parent]));
+ t!(repo.commit(
+ Some("HEAD"),
+ &sig,
+ &sig,
+ "Another commit",
+ &tree,
+ &[&parent]
+ ));
}
- return cksum
+ return cksum;
}
fn make_archive(&self) {
- let mut manifest = format!(r#"
+ let mut manifest = format!(
+ r#"
[package]
name = "{}"
version = "{}"
authors = []
- "#, self.name, self.vers);
+ "#,
+ self.name, self.vers
+ );
for dep in self.deps.iter() {
let target = match dep.target {
None => String::new(),
let kind = match &dep.kind[..] {
"build" => "build-",
"dev" => "dev-",
- _ => ""
+ _ => "",
};
- manifest.push_str(&format!(r#"
+ manifest.push_str(&format!(
+ r#"
[{}{}dependencies.{}]
version = "{}"
- "#, target, kind, dep.name, dep.vers));
+ "#,
+ target, kind, dep.name, dep.vers
+ ));
}
let dst = self.archive_dst();
t!(fs::create_dir_all(dst.parent().unwrap()));
let f = t!(File::create(&dst));
- let mut a =
- Builder::new(GzEncoder::new(f, Compression::default()));
+ let mut a = Builder::new(GzEncoder::new(f, Compression::default()));
self.append(&mut a, "Cargo.toml", &manifest);
if self.files.is_empty() {
self.append(&mut a, "src/lib.rs", "");
}
fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, contents: &str) {
- self.append_extra(ar,
- &format!("{}-{}/{}", self.name, self.vers, file),
- contents);
+ self.append_extra(
+ ar,
+ &format!("{}-{}/{}", self.name, self.vers, file),
+ contents,
+ );
}
fn append_extra<W: Write>(&self, ar: &mut Builder<W>, path: &str, contents: &str) {
pub fn archive_dst(&self) -> PathBuf {
if self.local {
- registry_path().join(format!("{}-{}.crate", self.name,
- self.vers))
+ registry_path().join(format!("{}-{}.crate", self.name, self.vers))
} else if self.alternative {
alt_dl_path()
.join(&self.name)
use cargo::util::{Cfg, CfgExpr};
use cargotest::rustc_host;
use cargotest::support::registry::Package;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
macro_rules! c {
}
fn good<T>(s: &str, expected: T)
- where T: FromStr + PartialEq + fmt::Debug,
- T::Err: fmt::Display
+where
+ T: FromStr + PartialEq + fmt::Debug,
+ T::Err: fmt::Display,
{
let c = match T::from_str(s) {
Ok(c) => c,
}
fn bad<T>(s: &str, err: &str)
- where T: FromStr + fmt::Display, T::Err: fmt::Display
+where
+ T: FromStr + fmt::Display,
+ T::Err: fmt::Display,
{
let e = match T::from_str(s) {
Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
Err(e) => e.to_string(),
};
- assert!(e.contains(err), "when parsing `{}`,\n\"{}\" not contained \
- inside: {}", s, err, e);
+ assert!(
+ e.contains(err),
+ "when parsing `{}`,\n\"{}\" not contained \
+ inside: {}",
+ s,
+ err,
+ e
+ );
}
#[test]
#[test]
fn cfg_easy() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
b = { path = 'b' }
[target."cfg(windows)".dependencies]
b = { path = 'b' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "extern crate b;")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn dont_include() {
- let other_family = if cfg!(unix) {"windows"} else {"unix"};
+ let other_family = if cfg!(unix) { "windows" } else { "unix" };
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "a"
version = "0.0.1"
[target.'cfg({})'.dependencies]
b = {{ path = 'b' }}
- "#, other_family))
+ "#,
+ other_family
+ ),
+ )
.file("src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn works_through_the_registry() {
Package::new("foo", "0.1.0").publish();
Package::new("bar", "0.1.0")
- .target_dep("foo", "0.1.0", "cfg(unix)")
- .target_dep("foo", "0.1.0", "cfg(windows)")
- .publish();
+ .target_dep("foo", "0.1.0", "cfg(unix)")
+ .target_dep("foo", "0.1.0", "cfg(windows)")
+ .publish();
let p = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies]
bar = "0.1.0"
- "#)
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate bar;")
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate bar;",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry [..]
[DOWNLOADING] [..]
[DOWNLOADING] [..]
[COMPILING] bar v0.1.0
[COMPILING] a v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn ignore_version_from_other_platform() {
- let this_family = if cfg!(unix) {"unix"} else {"windows"};
- let other_family = if cfg!(unix) {"windows"} else {"unix"};
+ let this_family = if cfg!(unix) { "unix" } else { "windows" };
+ let other_family = if cfg!(unix) { "windows" } else { "unix" };
Package::new("foo", "0.1.0").publish();
Package::new("foo", "0.2.0").publish();
let p = project("a")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "a"
version = "0.0.1"
[target.'cfg({})'.dependencies]
foo = "0.2.0"
- "#, this_family, other_family))
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate foo;")
+ "#,
+ this_family, other_family
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate foo;",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry [..]
[DOWNLOADING] [..]
[COMPILING] foo v0.1.0
[COMPILING] a v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_target_spec() {
let p = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[target.'cfg(4)'.dependencies]
bar = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Caused by:
unexpected character in cfg `4`, [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bad_target_spec2() {
let p = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[target.'cfg(foo =)'.dependencies]
bar = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Caused by:
expected a string, found nothing
-"));
+",
+ ),
+ );
}
#[test]
fn multiple_match_ok() {
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "a"
version = "0.0.1"
[target.{}.dependencies]
b = {{ path = 'b' }}
- "#, rustc_host()))
+ "#,
+ rustc_host()
+ ),
+ )
.file("src/lib.rs", "extern crate b;")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn any_ok() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[target."cfg(any(windows, unix))".dependencies]
b = { path = 'b' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "extern crate b;")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn check_success() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
::bar::baz();
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("check"),
- execs().with_status(0));
+ assert_that(foo.cargo("check"), execs().with_status(0));
}
#[test]
fn check_fail() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
::bar::baz(42);
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("check"),
- execs().with_status(101));
+ assert_that(foo.cargo("check"), execs().with_status(101));
}
#[test]
fn custom_derive() {
if !is_nightly() {
- return
+ return;
}
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(proc_macro)]
#[macro_use]
let a = A;
a.b();
}
-"#)
+"#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
[lib]
proc-macro = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(proc_macro, proc_macro_lib)]
#![crate_type = "proc-macro"]
pub fn derive(_input: TokenStream) -> TokenStream {
format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap()
}
-"#)
+"#,
+ )
.build();
- assert_that(foo.cargo("check"),
- execs().with_status(0));
+ assert_that(foo.cargo("check"), execs().with_status(0));
}
#[test]
fn check_build() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
::bar::baz();
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("check"),
- execs().with_status(0));
- assert_that(foo.cargo("build"),
- execs().with_status(0));
+ assert_that(foo.cargo("check"), execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
}
#[test]
fn build_check() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
::bar::baz();
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
- assert_that(foo.cargo("check"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
+ assert_that(foo.cargo("check"), execs().with_status(0));
}
// Checks that where a project has both a lib and a bin, the lib is only checked
#[test]
fn issue_3418() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(foo.cargo("check").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..] --emit=dep-info,metadata [..]"));
+ assert_that(
+ foo.cargo("check").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] --emit=dep-info,metadata [..]"),
+ );
}
// Some weirdness that seems to be caused by a crate being built as well as
#[test]
fn issue_3419() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
rustc-serialize = "*"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate rustc_serialize;
use rustc_serialize::Decodable;
pub fn take<T: Decodable>() {}
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate rustc_serialize;
extern crate foo;
fn main() {
foo::take::<Foo>();
}
- "#)
+ "#,
+ )
.build();
Package::new("rustc-serialize", "1.0.0")
- .file("src/lib.rs",
- r#"pub trait Decodable: Sized {
+ .file(
+ "src/lib.rs",
+ r#"pub trait Decodable: Sized {
fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error>;
}
pub trait Decoder {
fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
-> Result<T, Self::Error>
where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
- } "#).publish();
+ } "#,
+ )
+ .publish();
- assert_that(p.cargo("check"),
- execs().with_status(0));
+ assert_that(p.cargo("check"), execs().with_status(0));
}
// Check on a dylib should have a different metadata hash than build.
#[test]
fn dylib_check_preserves_build_cache() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
crate-type = ["dylib"]
[dependencies]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
- assert_that(p.cargo("check"),
- execs().with_status(0));
+ assert_that(p.cargo("check"), execs().with_status(0));
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
// test `cargo rustc --profile check`
#[test]
fn rustc_check() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
::bar::baz();
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustc")
- .arg("--profile")
- .arg("check")
- .arg("--")
- .arg("--emit=metadata"),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("rustc")
+ .arg("--profile")
+ .arg("check")
+ .arg("--")
+ .arg("--emit=metadata"),
+ execs().with_status(0),
+ );
}
#[test]
fn rustc_check_err() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
::bar::qux();
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustc")
- .arg("--profile")
- .arg("check")
- .arg("--")
- .arg("--emit=metadata"),
- execs().with_status(101));
+ assert_that(
+ foo.cargo("rustc")
+ .arg("--profile")
+ .arg("check")
+ .arg("--")
+ .arg("--emit=metadata"),
+ execs().with_status(101),
+ );
}
#[test]
fn check_all() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[workspace]
[dependencies]
b = { path = "b" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("tests/a.rs", "")
.file("src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/main.rs", "fn main() {}")
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("check").arg("--all").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]")
- .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]")
- .with_stderr_contains("[..] --crate-name b b[/]src[/]lib.rs [..]")
- .with_stderr_contains("[..] --crate-name b b[/]src[/]main.rs [..]")
- );
+ assert_that(
+ p.cargo("check").arg("--all").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]")
+ .with_stderr_contains("[..] --crate-name b b[/]src[/]lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name b b[/]src[/]main.rs [..]"),
+ );
}
#[test]
fn check_virtual_all_implied() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("check").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..] --crate-name foo foo[/]src[/]lib.rs [..]")
- .with_stderr_contains("[..] --crate-name bar bar[/]src[/]lib.rs [..]")
- );
+ assert_that(
+ p.cargo("check").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] --crate-name foo foo[/]src[/]lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name bar bar[/]src[/]lib.rs [..]"),
+ );
}
#[test]
.file("benches/bench3.rs", "")
.build();
- assert_that(foo.cargo("check").arg("--all-targets").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]")
- .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]")
- .with_stderr_contains("[..] --crate-name example1 examples[/]example1.rs [..]")
- .with_stderr_contains("[..] --crate-name test2 tests[/]test2.rs [..]")
- .with_stderr_contains("[..] --crate-name bench3 benches[/]bench3.rs [..]")
- );
+ assert_that(
+ foo.cargo("check").arg("--all-targets").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..]")
+ .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..]")
+ .with_stderr_contains("[..] --crate-name example1 examples[/]example1.rs [..]")
+ .with_stderr_contains("[..] --crate-name test2 tests[/]test2.rs [..]")
+ .with_stderr_contains("[..] --crate-name bench3 benches[/]bench3.rs [..]"),
+ );
}
#[test]
fn check_unit_test_profile() {
let foo = project("foo")
.file("Cargo.toml", SIMPLE_MANIFEST)
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(test)]
mod tests {
#[test]
badtext
}
}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("check"),
- execs().with_status(0));
- assert_that(foo.cargo("check").arg("--profile").arg("test"),
- execs().with_status(101)
- .with_stderr_contains("[..]badtext[..]"));
+ assert_that(foo.cargo("check"), execs().with_status(0));
+ assert_that(
+ foo.cargo("check").arg("--profile").arg("test"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..]badtext[..]"),
+ );
}
// Verify what is checked with various command-line filters.
fn check_filters() {
let p = project("foo")
.file("Cargo.toml", SIMPLE_MANIFEST)
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
fn unused_normal_lib() {}
#[cfg(test)]
mod tests {
fn unused_unit_lib() {}
}
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
fn unused_normal_bin() {}
#[cfg(test)]
mod tests {
fn unused_unit_bin() {}
}
- "#)
- .file("tests/t1.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/t1.rs",
+ r#"
fn unused_normal_t1() {}
#[cfg(test)]
mod tests {
fn unused_unit_t1() {}
}
- "#)
- .file("examples/ex1.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/ex1.rs",
+ r#"
fn main() {}
fn unused_normal_ex1() {}
#[cfg(test)]
mod tests {
fn unused_unit_ex1() {}
}
- "#)
- .file("benches/b1.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/b1.rs",
+ r#"
fn unused_normal_b1() {}
#[cfg(test)]
mod tests {
fn unused_unit_b1() {}
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("check"),
- execs().with_status(0)
- .with_stderr_contains("[..]unused_normal_lib[..]")
- .with_stderr_contains("[..]unused_normal_bin[..]")
- .with_stderr_does_not_contain("unused_normal_t1")
- .with_stderr_does_not_contain("unused_normal_ex1")
- .with_stderr_does_not_contain("unused_normal_b1")
- .with_stderr_does_not_contain("unused_unit_"));
+ assert_that(
+ p.cargo("check"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_does_not_contain("unused_normal_t1")
+ .with_stderr_does_not_contain("unused_normal_ex1")
+ .with_stderr_does_not_contain("unused_normal_b1")
+ .with_stderr_does_not_contain("unused_unit_"),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--tests").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..] --test [..]")
- .with_stderr_contains("[..] --crate-name foo src[/]lib.rs --crate-type lib [..]")
- .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..] --test [..]")
- .with_stderr_contains("[..] --crate-name foo src[/]main.rs --crate-type bin [..]")
- .with_stderr_contains("[..]unused_unit_lib[..]")
- .with_stderr_contains("[..]unused_unit_bin[..]")
- .with_stderr_contains("[..]unused_normal_lib[..]")
- .with_stderr_contains("[..]unused_normal_bin[..]")
- .with_stderr_contains("[..]unused_unit_t1[..]")
- .with_stderr_contains("[..]unused_normal_ex1[..]")
- .with_stderr_contains("[..]unused_unit_ex1[..]")
- .with_stderr_does_not_contain("unused_normal_b1")
- .with_stderr_does_not_contain("unused_unit_b1"));
+ assert_that(
+ p.cargo("check").arg("--tests").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..] --test [..]")
+ .with_stderr_contains("[..] --crate-name foo src[/]lib.rs --crate-type lib [..]")
+ .with_stderr_contains("[..] --crate-name foo src[/]main.rs [..] --test [..]")
+ .with_stderr_contains("[..] --crate-name foo src[/]main.rs --crate-type bin [..]")
+ .with_stderr_contains("[..]unused_unit_lib[..]")
+ .with_stderr_contains("[..]unused_unit_bin[..]")
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_contains("[..]unused_unit_t1[..]")
+ .with_stderr_contains("[..]unused_normal_ex1[..]")
+ .with_stderr_contains("[..]unused_unit_ex1[..]")
+ .with_stderr_does_not_contain("unused_normal_b1")
+ .with_stderr_does_not_contain("unused_unit_b1"),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--test").arg("t1").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..]unused_normal_lib[..]")
- .with_stderr_contains("[..]unused_normal_bin[..]")
- .with_stderr_contains("[..]unused_unit_t1[..]")
- .with_stderr_does_not_contain("unused_unit_lib")
- .with_stderr_does_not_contain("unused_unit_bin")
- .with_stderr_does_not_contain("unused_normal_ex1")
- .with_stderr_does_not_contain("unused_normal_b1")
- .with_stderr_does_not_contain("unused_unit_ex1")
- .with_stderr_does_not_contain("unused_unit_b1"));
+ assert_that(
+ p.cargo("check").arg("--test").arg("t1").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_contains("[..]unused_unit_t1[..]")
+ .with_stderr_does_not_contain("unused_unit_lib")
+ .with_stderr_does_not_contain("unused_unit_bin")
+ .with_stderr_does_not_contain("unused_normal_ex1")
+ .with_stderr_does_not_contain("unused_normal_b1")
+ .with_stderr_does_not_contain("unused_unit_ex1")
+ .with_stderr_does_not_contain("unused_unit_b1"),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--all-targets").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("[..]unused_normal_lib[..]")
- .with_stderr_contains("[..]unused_normal_bin[..]")
- .with_stderr_contains("[..]unused_normal_t1[..]")
- .with_stderr_contains("[..]unused_normal_ex1[..]")
- .with_stderr_contains("[..]unused_normal_b1[..]")
- .with_stderr_contains("[..]unused_unit_b1[..]")
- .with_stderr_contains("[..]unused_unit_t1[..]")
- .with_stderr_contains("[..]unused_unit_lib[..]")
- .with_stderr_contains("[..]unused_unit_bin[..]")
- .with_stderr_contains("[..]unused_unit_ex1[..]"));
+ assert_that(
+ p.cargo("check").arg("--all-targets").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..]unused_normal_lib[..]")
+ .with_stderr_contains("[..]unused_normal_bin[..]")
+ .with_stderr_contains("[..]unused_normal_t1[..]")
+ .with_stderr_contains("[..]unused_normal_ex1[..]")
+ .with_stderr_contains("[..]unused_normal_b1[..]")
+ .with_stderr_contains("[..]unused_unit_b1[..]")
+ .with_stderr_contains("[..]unused_unit_t1[..]")
+ .with_stderr_contains("[..]unused_unit_lib[..]")
+ .with_stderr_contains("[..]unused_unit_bin[..]")
+ .with_stderr_contains("[..]unused_unit_ex1[..]"),
+ );
}
#[test]
-fn check_artifacts()
-{
+fn check_artifacts() {
// Verify which artifacts are created when running check (#4059).
let p = project("foo")
.file("Cargo.toml", SIMPLE_MANIFEST)
.file("benches/b1.rs", "")
.build();
assert_that(p.cargo("check"), execs().with_status(0));
- assert_that(&p.root().join("target/debug/libfoo.rmeta"),
- existing_file());
- assert_that(&p.root().join("target/debug/libfoo.rlib"),
- is_not(existing_file()));
- assert_that(&p.root().join("target/debug").join(exe("foo")),
- is_not(existing_file()));
+ assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file());
+ assert_that(
+ &p.root().join("target/debug/libfoo.rlib"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(exe("foo")),
+ is_not(existing_file()),
+ );
p.root().join("target").rm_rf();
assert_that(p.cargo("check").arg("--lib"), execs().with_status(0));
- assert_that(&p.root().join("target/debug/libfoo.rmeta"),
- existing_file());
- assert_that(&p.root().join("target/debug/libfoo.rlib"),
- is_not(existing_file()));
- assert_that(&p.root().join("target/debug").join(exe("foo")),
- is_not(existing_file()));
+ assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file());
+ assert_that(
+ &p.root().join("target/debug/libfoo.rlib"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(exe("foo")),
+ is_not(existing_file()),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--bin").arg("foo"),
- execs().with_status(0));
- assert_that(&p.root().join("target/debug/libfoo.rmeta"),
- existing_file());
- assert_that(&p.root().join("target/debug/libfoo.rlib"),
- is_not(existing_file()));
- assert_that(&p.root().join("target/debug").join(exe("foo")),
- is_not(existing_file()));
+ assert_that(
+ p.cargo("check").arg("--bin").arg("foo"),
+ execs().with_status(0),
+ );
+ assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file());
+ assert_that(
+ &p.root().join("target/debug/libfoo.rlib"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(exe("foo")),
+ is_not(existing_file()),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--test").arg("t1"),
- execs().with_status(0));
- assert_that(&p.root().join("target/debug/libfoo.rmeta"),
- existing_file());
- assert_that(&p.root().join("target/debug/libfoo.rlib"),
- is_not(existing_file()));
- assert_that(&p.root().join("target/debug").join(exe("foo")),
- is_not(existing_file()));
- assert_eq!(glob(&p.root().join("target/debug/t1-*").to_str().unwrap())
- .unwrap().count(), 0);
+ assert_that(
+ p.cargo("check").arg("--test").arg("t1"),
+ execs().with_status(0),
+ );
+ assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file());
+ assert_that(
+ &p.root().join("target/debug/libfoo.rlib"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(exe("foo")),
+ is_not(existing_file()),
+ );
+ assert_eq!(
+ glob(&p.root().join("target/debug/t1-*").to_str().unwrap())
+ .unwrap()
+ .count(),
+ 0
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--example").arg("ex1"),
- execs().with_status(0));
- assert_that(&p.root().join("target/debug/libfoo.rmeta"),
- existing_file());
- assert_that(&p.root().join("target/debug/libfoo.rlib"),
- is_not(existing_file()));
- assert_that(&p.root().join("target/debug/examples").join(exe("ex1")),
- is_not(existing_file()));
+ assert_that(
+ p.cargo("check").arg("--example").arg("ex1"),
+ execs().with_status(0),
+ );
+ assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file());
+ assert_that(
+ &p.root().join("target/debug/libfoo.rlib"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("target/debug/examples").join(exe("ex1")),
+ is_not(existing_file()),
+ );
p.root().join("target").rm_rf();
- assert_that(p.cargo("check").arg("--bench").arg("b1"),
- execs().with_status(0));
- assert_that(&p.root().join("target/debug/libfoo.rmeta"),
- existing_file());
- assert_that(&p.root().join("target/debug/libfoo.rlib"),
- is_not(existing_file()));
- assert_that(&p.root().join("target/debug").join(exe("foo")),
- is_not(existing_file()));
- assert_eq!(glob(&p.root().join("target/debug/b1-*").to_str().unwrap())
- .unwrap().count(), 0);
+ assert_that(
+ p.cargo("check").arg("--bench").arg("b1"),
+ execs().with_status(0),
+ );
+ assert_that(&p.root().join("target/debug/libfoo.rmeta"), existing_file());
+ assert_that(
+ &p.root().join("target/debug/libfoo.rlib"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("target/debug").join(exe("foo")),
+ is_not(existing_file()),
+ );
+ assert_eq!(
+ glob(&p.root().join("target/debug/b1-*").to_str().unwrap())
+ .unwrap()
+ .count(),
+ 0
+ );
}
use std::env;
-use cargotest::support::{git, project, execs, main_file, basic_bin_manifest};
+use cargotest::support::{basic_bin_manifest, execs, git, main_file, project};
use cargotest::support::registry::Package;
use hamcrest::{assert_that, existing_dir, existing_file, is_not};
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.build_dir(), existing_dir());
- assert_that(p.cargo("clean"),
- execs().with_status(0));
+ assert_that(p.cargo("clean"), execs().with_status(0));
assert_that(&p.build_dir(), is_not(existing_dir()));
}
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.build_dir(), existing_dir());
- assert_that(p.cargo("clean").cwd(&p.root().join("src")),
- execs().with_status(0).with_stdout(""));
+ assert_that(
+ p.cargo("clean").cwd(&p.root().join("src")),
+ execs().with_status(0).with_stdout(""),
+ );
assert_that(&p.build_dir(), is_not(existing_dir()));
}
#[test]
fn clean_multiple_packages() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
- "#)
+ "#,
+ )
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[[bin]]
name = "d1"
- "#)
+ "#,
+ )
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
[[bin]]
name = "d2"
- "#)
+ "#,
+ )
.file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
.build();
- assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("d2")
- .arg("-p").arg("foo"),
- execs().with_status(0));
-
- let d1_path = &p.build_dir().join("debug")
- .join(format!("d1{}", env::consts::EXE_SUFFIX));
- let d2_path = &p.build_dir().join("debug")
- .join(format!("d2{}", env::consts::EXE_SUFFIX));
-
+ assert_that(
+ p.cargo("build")
+ .arg("-p")
+ .arg("d1")
+ .arg("-p")
+ .arg("d2")
+ .arg("-p")
+ .arg("foo"),
+ execs().with_status(0),
+ );
+
+ let d1_path = &p.build_dir()
+ .join("debug")
+ .join(format!("d1{}", env::consts::EXE_SUFFIX));
+ let d2_path = &p.build_dir()
+ .join("debug")
+ .join(format!("d2{}", env::consts::EXE_SUFFIX));
assert_that(&p.bin("foo"), existing_file());
assert_that(d1_path, existing_file());
assert_that(d2_path, existing_file());
- assert_that(p.cargo("clean").arg("-p").arg("d1").arg("-p").arg("d2")
- .cwd(&p.root().join("src")),
- execs().with_status(0).with_stdout(""));
+ assert_that(
+ p.cargo("clean")
+ .arg("-p")
+ .arg("d1")
+ .arg("-p")
+ .arg("d2")
+ .cwd(&p.root().join("src")),
+ execs().with_status(0).with_stdout(""),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(d1_path, is_not(existing_file()));
assert_that(d2_path, is_not(existing_file()));
#[test]
fn clean_release() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0));
-
- assert_that(p.cargo("clean").arg("-p").arg("foo"),
- execs().with_status(0));
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0).with_stdout(""));
-
- assert_that(p.cargo("clean").arg("-p").arg("foo").arg("--release"),
- execs().with_status(0));
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0).with_stderr("\
+ assert_that(p.cargo("build").arg("--release"), execs().with_status(0));
+
+ assert_that(
+ p.cargo("clean").arg("-p").arg("foo"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").arg("--release"),
+ execs().with_status(0).with_stdout(""),
+ );
+
+ assert_that(
+ p.cargo("clean").arg("-p").arg("foo").arg("--release"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").arg("--release"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] release [optimized] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn build_script() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::path::PathBuf;
use std::env;
assert!(!std::fs::metadata(out.join("out")).is_ok());
}
}
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").env("FIRST", "1"),
- execs().with_status(0));
- assert_that(p.cargo("clean").arg("-p").arg("foo"),
- execs().with_status(0));
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(p.cargo("build").env("FIRST", "1"), execs().with_status(0));
+ assert_that(
+ p.cargo("clean").arg("-p").arg("foo"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `[..]build-script-build`
[RUNNING] `rustc [..] src[/]main.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn clean_git() {
let git = git::new("dep", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
dep = {{ git = '{}' }}
- "#, git.url()))
+ "#,
+ git.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("clean").arg("-p").arg("dep"),
- execs().with_status(0).with_stdout(""));
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("clean").arg("-p").arg("dep"),
+ execs().with_status(0).with_stdout(""),
+ );
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("clean").arg("-p").arg("bar"),
- execs().with_status(0).with_stdout(""));
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("clean").arg("-p").arg("bar"),
+ execs().with_status(0).with_stdout(""),
+ );
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
-fn clean_verbose(){
+fn clean_verbose() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "0.1"
- "#)
- .file("src/main.rs", "fn main() {}")
- .build();
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
Package::new("bar", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("clean").arg("-p").arg("bar").arg("--verbose"),
- execs().with_status(0).with_stderr("\
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("clean").arg("-p").arg("bar").arg("--verbose"),
+ execs().with_status(0).with_stderr(
+ "\
[REMOVING] [..]
[REMOVING] [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0));
+",
+ ),
+ );
+ assert_that(p.cargo("build"), execs().with_status(0));
}
use git2;
use cargotest;
-use cargotest::install::{has_installed_exe, cargo_home};
+use cargotest::install::{cargo_home, has_installed_exe};
use cargotest::support::git;
use cargotest::support::registry::Package;
use cargotest::support::{execs, project};
#[test]
fn multiple_installs() {
let p = project("foo")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.file("a/src/main.rs", "fn main() {}")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "bar"
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.file("b/src/main.rs", "fn main() {}");
let p = p.build();
pkg("foo", "0.0.1");
pkg("bar", "0.0.1");
- let mut a = cargotest::cargo_process().arg("install").arg("foo").build_command();
- let mut b = cargotest::cargo_process().arg("install").arg("bar").build_command();
+ let mut a = cargotest::cargo_process()
+ .arg("install")
+ .arg("foo")
+ .build_command();
+ let mut b = cargotest::cargo_process()
+ .arg("install")
+ .arg("bar")
+ .build_command();
a.stdout(Stdio::piped()).stderr(Stdio::piped());
b.stdout(Stdio::piped()).stderr(Stdio::piped());
#[test]
fn one_install_should_be_bad() {
let p = project("foo")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.file("a/src/main.rs", "fn main() {}")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.file("b/src/main.rs", "fn main() {}");
let p = p.build();
let b = b.wait_with_output().unwrap();
let a = a.join().unwrap();
- let (bad, good) = if a.status.code() == Some(101) {(a, b)} else {(b, a)};
- assert_that(bad, execs().with_status(101).with_stderr_contains("\
+ let (bad, good) = if a.status.code() == Some(101) {
+ (a, b)
+ } else {
+ (b, a)
+ };
+ assert_that(
+ bad,
+ execs().with_status(101).with_stderr_contains(
+ "\
[ERROR] binary `foo[..]` already exists in destination as part of `[..]`
-"));
- assert_that(good, execs().with_status(0).with_stderr_contains("\
+",
+ ),
+ );
+ assert_that(
+ good,
+ execs().with_status(0).with_stderr_contains(
+ "\
warning: be sure to add `[..]` to your PATH [..]
-"));
+",
+ ),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
pkg.publish();
let p = project("foo")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("a/src/main.rs", "fn main() {}")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "bar"
authors = []
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("b/src/main.rs", "fn main() {}");
let p = p.build();
assert_that(b, execs().with_status(0));
let suffix = env::consts::EXE_SUFFIX;
- assert_that(&p.root().join("a/target/debug").join(format!("foo{}", suffix)),
- existing_file());
- assert_that(&p.root().join("b/target/debug").join(format!("bar{}", suffix)),
- existing_file());
+ assert_that(
+ &p.root()
+ .join("a/target/debug")
+ .join(format!("foo{}", suffix)),
+ existing_file(),
+ );
+ assert_that(
+ &p.root()
+ .join("b/target/debug")
+ .join(format!("bar{}", suffix)),
+ existing_file(),
+ );
}
#[test]
fn git_same_repo_different_tags() {
let a = git::new("dep", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep"
version = "0.5.0"
authors = []
- "#).file("src/lib.rs", "pub fn tag1() {}")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn tag1() {}")
}).unwrap();
let repo = git2::Repository::open(&a.root()).unwrap();
git::tag(&repo, "tag1");
- File::create(a.root().join("src/lib.rs")).unwrap()
- .write_all(b"pub fn tag2() {}").unwrap();
+ File::create(a.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(b"pub fn tag2() {}")
+ .unwrap();
git::add(&repo);
git::commit(&repo);
git::tag(&repo, "tag2");
let p = project("foo")
- .file("a/Cargo.toml", &format!(r#"
+ .file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
authors = []
[dependencies]
dep = {{ git = '{}', tag = 'tag1' }}
- "#, a.url()))
- .file("a/src/main.rs", "extern crate dep; fn main() { dep::tag1(); }")
- .file("b/Cargo.toml", &format!(r#"
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "a/src/main.rs",
+ "extern crate dep; fn main() { dep::tag1(); }",
+ )
+ .file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "bar"
authors = []
[dependencies]
dep = {{ git = '{}', tag = 'tag2' }}
- "#, a.url()))
- .file("b/src/main.rs", "extern crate dep; fn main() { dep::tag2(); }");
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "b/src/main.rs",
+ "extern crate dep; fn main() { dep::tag2(); }",
+ );
let p = p.build();
- let mut a = p.cargo("build").arg("-v").cwd(p.root().join("a")).build_command();
- let mut b = p.cargo("build").arg("-v").cwd(p.root().join("b")).build_command();
+ let mut a = p.cargo("build")
+ .arg("-v")
+ .cwd(p.root().join("a"))
+ .build_command();
+ let mut b = p.cargo("build")
+ .arg("-v")
+ .cwd(p.root().join("b"))
+ .build_command();
a.stdout(Stdio::piped()).stderr(Stdio::piped());
b.stdout(Stdio::piped()).stderr(Stdio::piped());
#[test]
fn git_same_branch_different_revs() {
let a = git::new("dep", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep"
version = "0.5.0"
authors = []
- "#).file("src/lib.rs", "pub fn f1() {}")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn f1() {}")
}).unwrap();
let p = project("foo")
- .file("a/Cargo.toml", &format!(r#"
+ .file(
+ "a/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
authors = []
[dependencies]
dep = {{ git = '{}' }}
- "#, a.url()))
- .file("a/src/main.rs", "extern crate dep; fn main() { dep::f1(); }")
- .file("b/Cargo.toml", &format!(r#"
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "a/src/main.rs",
+ "extern crate dep; fn main() { dep::f1(); }",
+ )
+ .file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "bar"
authors = []
[dependencies]
dep = {{ git = '{}' }}
- "#, a.url()))
- .file("b/src/main.rs", "extern crate dep; fn main() { dep::f2(); }");
+ "#,
+ a.url()
+ ),
+ )
+ .file(
+ "b/src/main.rs",
+ "extern crate dep; fn main() { dep::f2(); }",
+ );
let p = p.build();
// Generate a Cargo.lock pointing at the current rev, then clear out the
// target directory
- assert_that(p.cargo("build").cwd(p.root().join("a")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("a")),
+ execs().with_status(0),
+ );
fs::remove_dir_all(p.root().join("a/target")).unwrap();
// Make a new commit on the master branch
let repo = git2::Repository::open(&a.root()).unwrap();
- File::create(a.root().join("src/lib.rs")).unwrap()
- .write_all(b"pub fn f2() {}").unwrap();
+ File::create(a.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(b"pub fn f2() {}")
+ .unwrap();
git::add(&repo);
git::commit(&repo);
#[test]
fn same_project() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("src/lib.rs", "");
let p = p.build();
#[cfg_attr(target_os = "windows", ignore)]
fn killing_cargo_releases_the_lock() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::net::TcpStream;
fn main() {
std::thread::sleep(std::time::Duration::new(10, 0));
}
}
- "#);
+ "#,
+ );
let p = p.build();
// Our build script will connect to our local TCP socket to inform us that
let mut b = p.cargo("build").build_command();
a.stdout(Stdio::piped()).stderr(Stdio::piped());
b.stdout(Stdio::piped()).stderr(Stdio::piped());
- a.env("ADDR", l.local_addr().unwrap().to_string()).env("A", "a");
- b.env("ADDR", l.local_addr().unwrap().to_string()).env_remove("A");
+ a.env("ADDR", l.local_addr().unwrap().to_string())
+ .env("A", "a");
+ b.env("ADDR", l.local_addr().unwrap().to_string())
+ .env_remove("A");
// Spawn `a`, wait for it to get to the build script (at which point the
// lock is held), then kill it.
#[test]
fn debug_release_ok() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}");
let p = p.build();
let b = b.wait_with_output().unwrap();
let a = a.join().unwrap();
- assert_that(a, execs().with_status(0).with_stderr("\
+ assert_that(
+ a,
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.0 [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(b, execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ b,
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.0 [..]
[FINISHED] release [optimized] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn no_deadlock_with_git_dependencies() {
let dep1 = git::new("dep1", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
version = "0.5.0"
authors = []
- "#).file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let dep2 = git::new("dep2", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep2"
version = "0.5.0"
authors = []
- "#).file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
authors = []
[dependencies]
dep1 = {{ git = '{}' }}
dep2 = {{ git = '{}' }}
- "#, dep1.url(), dep2.url()))
+ "#,
+ dep1.url(),
+ dep2.url()
+ ),
+ )
.file("src/main.rs", "fn main() { }");
let p = p.build();
let (tx, rx) = channel();
for _ in 0..n_concurrent_builds {
- let cmd = p.cargo("build").build_command()
+ let cmd = p.cargo("build")
+ .build_command()
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn();
let recv_timeout = |chan: &::std::sync::mpsc::Receiver<_>| {
for _ in 0..3000 {
if let Ok(x) = chan.try_recv() {
- return x
+ return x;
}
thread::sleep(Duration::from_millis(10));
}
let result = recv_timeout(&rx);
assert_that(result, execs().with_status(0))
}
-
}
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn read_env_vars_for_config() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
assert_eq!(env::var("NUM_JOBS").unwrap(), "100");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").env("CARGO_BUILD_JOBS", "100"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("CARGO_BUILD_JOBS", "100"),
+ execs().with_status(0),
+ );
}
use cargo::util::paths as cargopaths;
use cargotest::support::paths;
-use cargotest::support::{git, project, execs};
+use cargotest::support::{execs, git, project};
use hamcrest::assert_that;
#[test]
let project = project("foo");
let git_project = git::new("bar", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
}).unwrap();
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = {{ git = '{}' }}
- "#, git_project.url()))
+ "#,
+ git_project.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
let log = "cargo::sources::git=trace";
for file in files {
if !file.exists() {
- continue
+ continue;
}
println!("deleting {}", file.display());
cargopaths::remove_file(&file).unwrap();
- assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0),
+ );
if !file.exists() {
- continue
+ continue;
}
println!("truncating {}", file.display());
make_writable(&file);
.unwrap()
.set_len(2)
.unwrap();
- assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0),
+ );
}
}
let project = project("foo");
let git_project = git::new("bar", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
}).unwrap();
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = {{ git = '{}' }}
- "#, git_project.url()))
+ "#,
+ git_project.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
let log = "cargo::sources::git=trace";
for file in files {
if !file.exists() {
- continue
+ continue;
}
println!("deleting {}", file.display());
cargopaths::remove_file(&file).unwrap();
- assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0),
+ );
if !file.exists() {
- continue
+ continue;
}
println!("truncating {}", file.display());
make_writable(&file);
.unwrap()
.set_len(2)
.unwrap();
- assert_that(project.cargo("build").env("RUST_LOG", log).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ project.cargo("build").env("RUST_LOG", log).arg("-v"),
+ execs().with_status(0),
+ );
}
}
use cargo::util::process;
use cargotest::{is_nightly, rustc_host};
-use cargotest::support::{project, execs, basic_bin_manifest, cross_compile};
+use cargotest::support::{basic_bin_manifest, cross_compile, execs, project};
use hamcrest::{assert_that, existing_file};
#[test]
fn simple_cross() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
fn main() {{
assert_eq!(std::env::var("TARGET").unwrap(), "{}");
}}
- "#, cross_compile::alternate()))
- .file("src/main.rs", &format!(r#"
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs().with_status(0),
+ );
assert_that(&p.target_bin(&target, "foo"), existing_file());
- assert_that(process(&p.target_bin(&target, "foo")),
- execs().with_status(0));
+ assert_that(
+ process(&p.target_bin(&target, "foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn simple_cross_config() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[build]
target = "{}"
- "#, cross_compile::alternate()))
- .file("Cargo.toml", r#"
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
fn main() {{
assert_eq!(std::env::var("TARGET").unwrap(), "{}");
}}
- "#, cross_compile::alternate()))
- .file("src/main.rs", &format!(r#"
+ "#,
+ cross_compile::alternate()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
assert_that(&p.target_bin(&target, "foo"), existing_file());
- assert_that(process(&p.target_bin(&target, "foo")),
- execs().with_status(0));
+ assert_that(
+ process(&p.target_bin(&target, "foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn simple_deps() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::bar(); }
- "#)
+ "#,
+ )
.build();
let _p2 = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn bar() {}")
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("build").arg("--target").arg(&target),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target),
+ execs().with_status(0),
+ );
assert_that(&p.target_bin(&target, "foo"), existing_file());
- assert_that(process(&p.target_bin(&target, "foo")),
- execs().with_status(0));
+ assert_that(
+ process(&p.target_bin(&target, "foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn plugin_deps() {
- if cross_compile::disabled() { return }
- if !is_nightly() { return }
+ if cross_compile::disabled() {
+ return;
+ }
+ if !is_nightly() {
+ return;
+ }
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "../baz"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(plugin)]
#![plugin(bar)]
extern crate baz;
fn main() {
assert_eq!(bar!(), baz::baz());
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
plugin = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(plugin_registrar, quote, rustc_private)]
extern crate rustc_plugin;
-> Box<MacResult + 'static> {
MacEager::expr(cx.expr_lit(sp, LitKind::Int(1, LitIntType::Unsuffixed)))
}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
.build();
let target = cross_compile::alternate();
- assert_that(foo.cargo("build").arg("--target").arg(&target),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").arg("--target").arg(&target),
+ execs().with_status(0),
+ );
assert_that(&foo.target_bin(&target, "foo"), existing_file());
- assert_that(process(&foo.target_bin(&target, "foo")),
- execs().with_status(0));
+ assert_that(
+ process(&foo.target_bin(&target, "foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn plugin_to_the_max() {
- if cross_compile::disabled() { return }
- if !is_nightly() { return }
+ if cross_compile::disabled() {
+ return;
+ }
+ if !is_nightly() {
+ return;
+ }
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "../baz"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(plugin)]
#![plugin(bar)]
extern crate baz;
fn main() {
assert_eq!(bar!(), baz::baz());
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.baz]
path = "../baz"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(plugin_registrar, quote, rustc_private)]
extern crate rustc_plugin;
let path = cx.path(sp, vec![bar.clone(), bar]);
MacEager::expr(cx.expr_call(sp, cx.expr_path(path), vec![]))
}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
.build();
let target = cross_compile::alternate();
- assert_that(foo.cargo("build").arg("--target").arg(&target).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs().with_status(0),
+ );
println!("second");
- assert_that(foo.cargo("build").arg("-v")
- .arg("--target").arg(&target),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").arg("-v").arg("--target").arg(&target),
+ execs().with_status(0),
+ );
assert_that(&foo.target_bin(&target, "foo"), existing_file());
- assert_that(process(&foo.target_bin(&target, "foo")),
- execs().with_status(0));
+ assert_that(
+ process(&foo.target_bin(&target, "foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn linker_and_ar() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let p = project("foo")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}]
ar = "my-ar-tool"
linker = "my-linker-tool"
- "#, target))
+ "#,
+ target
+ ),
+ )
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/foo.rs", &format!(r#"
+ .file(
+ "src/foo.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("--target").arg(&target)
- .arg("-v"),
- execs().with_status(101)
- .with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs().with_status(101).with_stderr_contains(&format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc --crate-name foo src[/]foo.rs --crate-type bin \
--emit=dep-info,link -C debuginfo=2 \
-L dependency={dir}[/]target[/]{target}[/]debug[/]deps \
-L dependency={dir}[/]target[/]debug[/]deps`
",
- dir = p.root().display(),
- url = p.url(),
- target = target,
- )));
+ dir = p.root().display(),
+ url = p.url(),
+ target = target,
+ )),
+ );
}
#[test]
fn plugin_with_extra_dylib_dep() {
- if cross_compile::disabled() { return }
- if !is_nightly() { return }
+ if cross_compile::disabled() {
+ return;
+ }
+ if !is_nightly() {
+ return;
+ }
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(plugin)]
#![plugin(bar)]
fn main() {}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.baz]
path = "../baz"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(plugin_registrar, rustc_private)]
extern crate rustc_plugin;
pub fn foo(reg: &mut Registry) {
println!("{}", baz::baz());
}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
[lib]
name = "baz"
crate_type = ["dylib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
.build();
let target = cross_compile::alternate();
- assert_that(foo.cargo("build").arg("--target").arg(&target),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").arg("--target").arg(&target),
+ execs().with_status(0),
+ );
}
#[test]
fn cross_tests() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
[[bin]]
name = "bar"
- "#)
- .file("src/bin/bar.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "src/bin/bar.rs",
+ &format!(
+ r#"
#[allow(unused_extern_crates)]
extern crate foo;
use std::env;
assert_eq!(env::consts::ARCH, "{}");
}}
#[test] fn test() {{ main() }}
- "#, cross_compile::alternate_arch()))
- .file("src/lib.rs", &format!(r#"
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ &format!(
+ r#"
use std::env;
pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }}
#[test] fn test_foo() {{ foo() }}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("test").arg("--target").arg(&target),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--target").arg(&target),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({foo})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]{triple}[/]debug[/]deps[/]bar-[..][EXE]", foo = p.url(), triple = target))
- .with_stdout_contains("test test_foo ... ok")
- .with_stdout_contains("test test ... ok"));
+[RUNNING] target[/]{triple}[/]debug[/]deps[/]bar-[..][EXE]",
+ foo = p.url(),
+ triple = target
+ ))
+ .with_stdout_contains("test test_foo ... ok")
+ .with_stdout_contains("test test ... ok"),
+ );
}
#[test]
fn no_cross_doctests() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
authors = []
version = "0.0.0"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! ```
//! extern crate foo;
//! assert!(true);
//! ```
- "#)
+ "#,
+ )
.build();
- let host_output = format!("\
+ let host_output = format!(
+ "\
[COMPILING] foo v0.0.0 ({foo})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo
-", foo = p.url());
+",
+ foo = p.url()
+ );
println!("a");
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&host_output));
+ assert_that(
+ p.cargo("test"),
+ execs().with_status(0).with_stderr(&host_output),
+ );
println!("b");
let target = cross_compile::host();
- assert_that(p.cargo("test").arg("--target").arg(&target),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--target").arg(&target),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({foo})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo
-", foo = p.url(), triple = target)));
+",
+ foo = p.url(),
+ triple = target
+ )),
+ );
println!("c");
let target = cross_compile::alternate();
- assert_that(p.cargo("test").arg("--target").arg(&target),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--target").arg(&target),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({foo})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]{triple}[/]debug[/]deps[/]foo-[..][EXE]
-", foo = p.url(), triple = target)));
+",
+ foo = p.url(),
+ triple = target
+ )),
+ );
}
#[test]
fn simple_cargo_run() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
- "#)
- .file("src/main.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("run").arg("--target").arg(&target),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("--target").arg(&target),
+ execs().with_status(0),
+ );
}
#[test]
fn cross_with_a_build_script() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = 'build.rs'
- "#)
- .file("build.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
use std::env;
use std::path::PathBuf;
fn main() {{
path.pop();
assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target");
}}
- "#, target))
+ "#,
+ target
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 (file://[..])
[RUNNING] `rustc [..] build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]foo-[..]`
[RUNNING] `{dir}[/]target[/]debug[/]build[/]foo-[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", target = target,
- dir = p.root().display())));
+",
+ target = target,
+ dir = p.root().display()
+ )),
+ );
}
#[test]
fn build_script_needed_for_host_and_target() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let host = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
path = "d1"
[build-dependencies.d2]
path = "d2"
- "#)
-
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate d2;
fn main() { d2::d2(); }
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
#[allow(unused_extern_crates)]
extern crate d1;
fn main() { d1::d1(); }
- ")
- .file("d1/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.0"
authors = []
build = 'build.rs'
- "#)
- .file("d1/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "d1/src/lib.rs",
+ "
pub fn d1() {}
- ")
- .file("d1/build.rs", r#"
+ ",
+ )
+ .file(
+ "d1/build.rs",
+ r#"
use std::env;
fn main() {
let target = env::var("TARGET").unwrap();
println!("cargo:rustc-flags=-L /path/to/{}", target);
}
- "#)
- .file("d2/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.0"
[dependencies.d1]
path = "../d1"
- "#)
- .file("d2/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "d2/src/lib.rs",
+ "
#[allow(unused_extern_crates)]
extern crate d1;
pub fn d2() { d1::d1(); }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"),
- execs().with_status(0)
- .with_stderr_contains(&format!("\
-[COMPILING] d1 v0.0.0 ({url}/d1)", url = p.url()))
- .with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(&format!(
+ "\
+ [COMPILING] d1 v0.0.0 ({url}/d1)",
+ url = p.url()
+ ))
+ .with_stderr_contains(&format!("\
[RUNNING] `rustc [..] d1[/]build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]d1-[..]`",
dir = p.root().display()))
- .with_stderr_contains(&format!("\
-[RUNNING] `{dir}[/]target[/]debug[/]build[/]d1-[..][/]build-script-build`",
- dir = p.root().display()))
- .with_stderr_contains("\
-[RUNNING] `rustc [..] d1[/]src[/]lib.rs [..]`")
- .with_stderr_contains(&format!("\
-[COMPILING] d2 v0.0.0 ({url}/d2)", url = p.url()))
- .with_stderr_contains(&format!("\
-[RUNNING] `rustc [..] d2[/]src[/]lib.rs [..] \
- -L /path/to/{host}`", host = host))
- .with_stderr_contains(&format!("\
-[COMPILING] foo v0.0.0 ({url})", url = p.url()))
- .with_stderr_contains(&format!("\
+ .with_stderr_contains(&format!(
+ "\
+ [RUNNING] `{dir}[/]target[/]debug[/]build[/]d1-[..][/]build-script-build`",
+ dir = p.root().display()
+ ))
+ .with_stderr_contains(
+ "\
+ [RUNNING] `rustc [..] d1[/]src[/]lib.rs [..]`",
+ )
+ .with_stderr_contains(&format!(
+ "\
+ [COMPILING] d2 v0.0.0 ({url}/d2)",
+ url = p.url()
+ ))
+ .with_stderr_contains(&format!(
+ "\
+ [RUNNING] `rustc [..] d2[/]src[/]lib.rs [..] \
+ -L /path/to/{host}`",
+ host = host
+ ))
+ .with_stderr_contains(&format!(
+ "\
+ [COMPILING] foo v0.0.0 ({url})",
+ url = p.url()
+ ))
+ .with_stderr_contains(&format!("\
[RUNNING] `rustc [..] build.rs [..] --out-dir {dir}[/]target[/]debug[/]build[/]foo-[..] \
-L /path/to/{host}`", dir = p.root().display(), host = host))
- .with_stderr_contains(&format!("\
-[RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..] \
- -L /path/to/{target}`", target = target)));
+ .with_stderr_contains(&format!(
+ "\
+ [RUNNING] `rustc [..] src[/]main.rs [..] --target {target} [..] \
+ -L /path/to/{target}`",
+ target = target
+ )),
+ );
}
#[test]
fn build_deps_for_the_right_arch() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[dependencies.d2]
path = "d2"
- "#)
+ "#,
+ )
.file("src/main.rs", "extern crate d2; fn main() {}")
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.0"
authors = []
- "#)
- .file("d1/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "d1/src/lib.rs",
+ "
pub fn d1() {}
- ")
- .file("d2/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.0"
[build-dependencies.d1]
path = "../d1"
- "#)
+ "#,
+ )
.file("d2/build.rs", "extern crate d1; fn main() {}")
.file("d2/src/lib.rs", "")
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn build_script_only_host() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
[build-dependencies.d1]
path = "d1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("build.rs", "extern crate d1; fn main() {}")
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.0"
authors = []
build = "build.rs"
- "#)
- .file("d1/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "d1/src/lib.rs",
+ "
pub fn d1() {}
- ")
- .file("d1/build.rs", r#"
+ ",
+ )
+ .file(
+ "d1/build.rs",
+ r#"
use std::env;
fn main() {
.contains("target/debug/build/d1-"),
"bad: {:?}", env::var("OUT_DIR"));
}
- "#)
+ "#,
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("build").arg("--target").arg(&target).arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--target").arg(&target).arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn plugin_build_script_right_arch() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
plugin = true
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--target").arg(cross_compile::alternate()),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("-v")
+ .arg("--target")
+ .arg(cross_compile::alternate()),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn build_script_with_platform_specific_dependencies() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let host = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.d1]
path = "d1"
- "#)
- .file("build.rs", "
+ "#,
+ )
+ .file(
+ "build.rs",
+ "
#[allow(unused_extern_crates)]
extern crate d1;
fn main() {}
- ")
+ ",
+ )
.file("src/lib.rs", "")
- .file("d1/Cargo.toml", &format!(r#"
+ .file(
+ "d1/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "d1"
version = "0.0.0"
[target.{}.dependencies]
d2 = {{ path = "../d2" }}
- "#, host))
- .file("d1/src/lib.rs", "
+ "#,
+ host
+ ),
+ )
+ .file(
+ "d1/src/lib.rs",
+ "
#[allow(unused_extern_crates)]
extern crate d2;
- ")
- .file("d2/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("d2/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--target").arg(&target),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] d2 v0.0.0 ([..])
[RUNNING] `rustc [..] d2[/]src[/]lib.rs [..]`
[COMPILING] d1 v0.0.0 ([..])
[RUNNING] `{dir}[/]target[/]debug[/]build[/]foo-[..][/]build-script-build`
[RUNNING] `rustc [..] src[/]lib.rs [..] --target {target} [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.root().display(), target = target)));
+",
+ dir = p.root().display(),
+ target = target
+ )),
+ );
}
#[test]
fn platform_specific_dependencies_do_not_leak() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let host = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.d1]
path = "d1"
- "#)
+ "#,
+ )
.file("build.rs", "extern crate d1; fn main() {}")
.file("src/lib.rs", "")
- .file("d1/Cargo.toml", &format!(r#"
+ .file(
+ "d1/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "d1"
version = "0.0.0"
[target.{}.dependencies]
d2 = {{ path = "../d2" }}
- "#, host))
+ "#,
+ host
+ ),
+ )
.file("d1/src/lib.rs", "extern crate d2;")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.0"
authors = []
- "#)
+ "#,
+ )
.file("d2/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target),
- execs().with_status(101)
- .with_stderr_contains("\
-[..] can't find crate for `d2`[..]"));
+ assert_that(
+ p.cargo("build").arg("-v").arg("--target").arg(&target),
+ execs().with_status(101).with_stderr_contains(
+ "\
+ [..] can't find crate for `d2`[..]",
+ ),
+ );
}
#[test]
fn platform_specific_variables_reflected_in_build_scripts() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let host = rustc_host();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[target.{target}.dependencies]
d2 = {{ path = "d2" }}
- "#, host = host, target = target))
- .file("build.rs", &format!(r#"
+ "#,
+ host = host,
+ target = target
+ ),
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
env::var(not_expected).err()
.expect(&format!("found {{}}", not_expected));
}}
- "#, host = host, target = target))
+ "#,
+ host = host,
+ target = target
+ ),
+ )
.file("src/lib.rs", "")
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.0"
authors = []
links = "d1"
build = "build.rs"
- "#)
- .file("d1/build.rs", r#"
+ "#,
+ )
+ .file(
+ "d1/build.rs",
+ r#"
fn main() { println!("cargo:val=1") }
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.0"
authors = []
links = "d2"
build = "build.rs"
- "#)
- .file("d2/build.rs", r#"
+ "#,
+ )
+ .file(
+ "d2/build.rs",
+ r#"
fn main() { println!("cargo:val=1") }
- "#)
+ "#,
+ )
.file("d2/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
- assert_that(p.cargo("build").arg("-v").arg("--target").arg(&target),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("-v").arg("--target").arg(&target),
+ execs().with_status(0),
+ );
}
#[test]
fn cross_test_dylib() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let target = cross_compile::alternate();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar as the_bar;
pub fn bar() { the_bar::baz(); }
#[test]
fn foo() { bar(); }
- "#)
- .file("tests/test.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
extern crate foo as the_foo;
#[test]
fn foo() { the_foo::bar(); }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
crate_type = ["dylib"]
- "#)
- .file("bar/src/lib.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ &format!(
+ r#"
use std::env;
pub fn baz() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
- assert_that(p.cargo("test").arg("--target").arg(&target),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--target").arg(&target),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]{arch}[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]{arch}[/]debug[/]deps[/]test-[..][EXE]",
- dir = p.url(), arch = cross_compile::alternate()))
- .with_stdout_contains_n("test foo ... ok", 2));
-
+ dir = p.url(),
+ arch = cross_compile::alternate()
+ ))
+ .with_stdout_contains_n("test foo ... ok", 2),
+ );
}
use std::path::PathBuf;
use std::io::prelude::*;
-use cargotest::support::{project, execs, cross_compile, publish};
+use cargotest::support::{cross_compile, execs, project, publish};
use hamcrest::{assert_that, contains};
use flate2::read::GzDecoder;
use tar::Archive;
#[test]
fn simple_cross_package() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
license = "MIT"
description = "foo"
repository = "bar"
- "#)
- .file("src/main.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("package").arg("--target").arg(&target),
- execs().with_status(0).with_status(0).with_stderr(&format!(
-" Packaging foo v0.0.0 ({dir})
+ assert_that(
+ p.cargo("package").arg("--target").arg(&target),
+ execs().with_status(0).with_stderr(&format!(
+ " Packaging foo v0.0.0 ({dir})
Verifying foo v0.0.0 ({dir})
Compiling foo v0.0.0 ({dir}/target/package/foo-0.0.0)
Finished dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
// Check that the tarball contains the files
let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
let entries = ar.entries().unwrap();
- let entry_paths = entries.map(|entry| {
- entry.unwrap().path().unwrap().into_owned()
- }).collect::<Vec<PathBuf>>();
- assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml")]));
- assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml.orig")]));
- assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.0/src/main.rs")]));
+ let entry_paths = entries
+ .map(|entry| entry.unwrap().path().unwrap().into_owned())
+ .collect::<Vec<PathBuf>>();
+ assert_that(
+ &entry_paths,
+ contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml")]),
+ );
+ assert_that(
+ &entry_paths,
+ contains(vec![PathBuf::from("foo-0.0.0/Cargo.toml.orig")]),
+ );
+ assert_that(
+ &entry_paths,
+ contains(vec![PathBuf::from("foo-0.0.0/src/main.rs")]),
+ );
}
#[test]
fn publish_with_target() {
- if cross_compile::disabled() { return }
+ if cross_compile::disabled() {
+ return;
+ }
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
license = "MIT"
description = "foo"
repository = "bar"
- "#)
- .file("src/main.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r#"
use std::env;
fn main() {{
assert_eq!(env::consts::ARCH, "{}");
}}
- "#, cross_compile::alternate_arch()))
+ "#,
+ cross_compile::alternate_arch()
+ ),
+ )
.build();
let target = cross_compile::alternate();
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string())
- .arg("--target").arg(&target),
- execs().with_status(0).with_stderr(&format!(
-" Updating registry `{registry}`
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string())
+ .arg("--target")
+ .arg(&target),
+ execs().with_status(0).with_stderr(&format!(
+ " Updating registry `{registry}`
Packaging foo v0.0.0 ({dir})
Verifying foo v0.0.0 ({dir})
Compiling foo v0.0.0 ({dir}/target/package/foo-0.0.0)
Finished dev [unoptimized + debuginfo] target(s) in [..]
Uploading foo v0.0.0 ({dir})
-", dir = p.url(), registry = publish::registry())));
+",
+ dir = p.url(),
+ registry = publish::registry()
+ )),
+ );
}
use std::fs;
use std::io::{self, Read};
use std::net::TcpListener;
-use std::process::{Stdio, Child};
+use std::process::{Child, Stdio};
use std::thread;
use std::time::Duration;
// can succeed or not.
#[cfg(windows)]
fn enabled() -> bool {
- use winapi::um::{handleapi, jobapi, jobapi2, processthreadsapi};
+ use winapi::um::{handleapi, jobapi, jobapi2, processthreadsapi};
unsafe {
// If we're not currently in a job, then we can definitely run these
let r = jobapi::IsProcessInJob(me, 0 as *mut _, &mut ret);
assert_ne!(r, 0);
if ret == ::winapi::shared::minwindef::FALSE {
- return true
+ return true;
}
// If we are in a job, then we can run these tests if we can be added to
#[test]
fn ctrl_c_kills_everyone() {
if !enabled() {
- return
+ return;
}
let listener = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = listener.local_addr().unwrap();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", &format!(r#"
+ .file(
+ "build.rs",
+ &format!(
+ r#"
use std::net::TcpStream;
use std::io::Read;
let _ = socket.read(&mut [0; 10]);
panic!("that read should never return");
}}
- "#, addr))
+ "#,
+ addr
+ ),
+ )
.build();
let mut cargo = p.cargo("build").build_command();
- cargo.stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
+ cargo
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
let mut child = cargo.spawn().unwrap();
let mut sock = listener.accept().unwrap().0;
thread::sleep(Duration::from_millis(100));
}
- panic!("couldn't remove build directory after a few tries, seems like \
- we won't be able to!");
+ panic!(
+ "couldn't remove build directory after a few tries, seems like \
+ we won't be able to!"
+ );
}
#[cfg(unix)]
-use cargotest::support::{basic_bin_manifest, main_file, execs, project};
+use cargotest::support::{basic_bin_manifest, execs, main_file, project};
use filetime::FileTime;
use hamcrest::{assert_that, existing_file};
#[test]
fn build_dep_info_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["lib"]
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
- assert_that(&p.example_lib("ex", "lib").with_extension("d"), existing_file());
+ assert_that(
+ &p.example_lib("ex", "lib").with_extension("d"),
+ existing_file(),
+ );
}
-
#[test]
fn build_dep_info_rlib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["rlib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
- assert_that(&p.example_lib("ex", "rlib").with_extension("d"), existing_file());
+ assert_that(
+ &p.example_lib("ex", "rlib").with_extension("d"),
+ existing_file(),
+ );
}
#[test]
fn build_dep_info_dylib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[example]]
name = "ex"
crate-type = ["dylib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
- assert_that(&p.example_lib("ex", "dylib").with_extension("d"), existing_file());
+ assert_that(
+ &p.example_lib("ex", "dylib").with_extension("d"),
+ existing_file(),
+ );
}
#[test]
fn no_rewrite_if_no_change() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
use cargotest::cargo_process;
use cargotest::support::git;
use cargotest::support::paths;
-use cargotest::support::registry::{Package, cksum};
-use cargotest::support::{project, execs, ProjectBuilder};
+use cargotest::support::registry::{cksum, Package};
+use cargotest::support::{execs, project, ProjectBuilder};
use hamcrest::assert_that;
fn setup() {
let root = paths::root();
t!(fs::create_dir(&root.join(".cargo")));
- t!(t!(File::create(root.join(".cargo/config"))).write_all(br#"
+ t!(t!(File::create(root.join(".cargo/config"))).write_all(
+ br#"
[source.crates-io]
replace-with = 'my-awesome-local-registry'
[source.my-awesome-local-registry]
directory = 'index'
- "#));
+ "#
+ ));
}
struct VendorPackage {
fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage {
self.p = Some(self.p.take().unwrap().file(name, contents));
- self.cksum.files.insert(name.to_string(), cksum(contents.as_bytes()));
+ self.cksum
+ .files
+ .insert(name.to_string(), cksum(contents.as_bytes()));
self
}
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0
[COMPILING] bar v0.1.0 ([..]bar)
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
VendorPackage::new("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
pub fn main() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(cargo_process().arg("install").arg("bar"),
- execs().with_status(0).with_stderr(
-" Installing bar v0.1.0
+ assert_that(
+ cargo_process().arg("install").arg("bar"),
+ execs().with_status(0).with_stderr(
+ " Installing bar v0.1.0
Compiling foo v0.1.0
Compiling bar v0.1.0
Finished release [optimized] target(s) in [..] secs
Installing [..]bar[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
-"));
+",
+ ),
+ );
}
#[test]
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
VendorPackage::new("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
baz = "9.8.7"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
pub fn main() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(cargo_process().arg("install").arg("bar"),
- execs().with_status(101).with_stderr(
-" Installing bar v0.1.0
+ assert_that(
+ cargo_process().arg("install").arg("bar"),
+ execs().with_status(101).with_stderr(
+ " Installing bar v0.1.0
error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]`
Caused by:
no matching package named `baz` found
location searched: registry `https://github.com/rust-lang/crates.io-index`
required by package `bar v0.1.0`
-"));
+",
+ ),
+ );
}
#[test]
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
VendorPackage::new("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[features]
wantbaz = ["baz"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
pub fn main() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(cargo_process().arg("install").arg("bar"),
- execs().with_status(0).with_stderr(
-" Installing bar v0.1.0
+ assert_that(
+ cargo_process().arg("install").arg("bar"),
+ execs().with_status(0).with_stderr(
+ " Installing bar v0.1.0
Compiling foo v0.1.0
Compiling bar v0.1.0
Finished release [optimized] target(s) in [..] secs
Installing [..]bar[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
-"));
+",
+ ),
+ );
}
#[test]
let _ = project("index").build();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: no matching package named `foo` found
location searched: [..]
required by package `bar v0.1.0 ([..])`
-"));
+",
+ ),
+ );
}
#[test]
setup();
VendorPackage::new("foo-0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.file(".cargo-checksum", "")
.build();
VendorPackage::new("foo-0.2.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.file(".cargo-checksum", "")
.build();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0
[COMPILING] bar v0.1.0 ([..]bar)
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn crates_io_then_directory() {
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
let cksum = Package::new("foo", "0.1.0")
- .file("src/lib.rs", "pub fn foo() -> u32 { 0 }")
- .publish();
+ .file("src/lib.rs", "pub fn foo() -> u32 { 0 }")
+ .publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.1.0 ([..])
[COMPILING] foo v0.1.0
[COMPILING] bar v0.1.0 ([..]bar)
[FINISHED] [..]
-"));
+",
+ ),
+ );
setup();
let mut v = VendorPackage::new("foo");
- v.file("Cargo.toml", r#"
+ v.file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#);
+ "#,
+ );
v.file("src/lib.rs", "pub fn foo() -> u32 { 1 }");
v.cksum.package = Some(cksum);
v.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0
[COMPILING] bar v0.1.0 ([..]bar)
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn crates_io_then_bad_checksum() {
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
Package::new("foo", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: checksum for `foo v0.1.0` changed between lock files
this could be indicative of a few possible errors:
unable to verify that `foo v0.1.0` is the same as when the lockfile was generated
-"));
+",
+ ),
+ );
}
#[test]
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
t!(f.write_all(b"fn foo() -> u32 { 0 }"));
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: the listed checksum of `[..]lib.rs` has changed:
expected: [..]
actual: [..]
directory sources are not intended to be edited, if modifications are \
required then it is recommended that [replace] is used with a forked copy of \
the source
-"));
+",
+ ),
+ );
}
#[test]
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- VendorPackage::new("bar")
- .file(".foo", "")
- .build();
+ VendorPackage::new("bar").file(".foo", "").build();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
setup();
VendorPackage::new("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
VendorPackage::new("bar")
.build();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
#[test]
fn git_lock_file_doesnt_change() {
-
let git = git::new("git", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "git"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "")
+ "#,
+ ).file("src/lib.rs", "")
}).unwrap();
VendorPackage::new("git")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "git"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.disable_checksum()
.build();
let p = project("bar")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
git = {{ git = '{0}' }}
- "#, git.url()))
+ "#,
+ git.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
let root = paths::root();
t!(fs::create_dir(&root.join(".cargo")));
- t!(t!(File::create(root.join(".cargo/config"))).write_all(&format!(r#"
+ t!(
+ t!(File::create(root.join(".cargo/config"))).write_all(&format!(
+ r#"
[source.my-git-repo]
git = '{}'
replace-with = 'my-awesome-local-registry'
[source.my-awesome-local-registry]
directory = 'index'
- "#, git.url()).as_bytes()));
-
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ "#,
+ git.url()
+ ).as_bytes())
+ );
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] [..]
[COMPILING] [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
let mut lock2 = String::new();
t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock2));
#[test]
fn git_override_requires_lockfile() {
VendorPackage::new("git")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "git"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.disable_checksum()
.build();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
git = { git = 'https://example.com/' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let root = paths::root();
t!(fs::create_dir(&root.join(".cargo")));
- t!(t!(File::create(root.join(".cargo/config"))).write_all(br#"
+ t!(t!(File::create(root.join(".cargo/config"))).write_all(
+ br#"
[source.my-git-repo]
git = 'https://example.com/'
replace-with = 'my-awesome-local-registry'
[source.my-awesome-local-registry]
directory = 'index'
- "#));
+ "#
+ ));
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to load source for a dependency on `git`
Caused by:
remove the source replacement configuration, generate a lock file, and then
restore the source replacement configuration to continue the build
-"));
+",
+ ),
+ );
}
use std::io::Read;
use cargotest::rustc_host;
-use cargotest::support::{project, execs, path2url};
+use cargotest::support::{execs, project, path2url};
use cargotest::support::registry::Package;
-use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
use cargo::util::ProcessError;
#[test]
fn simple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[..] foo v0.0.1 ({dir})
[..] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = path2url(p.root()))));
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
}
#[test]
fn doc_no_libs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
doc = false
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
bad code
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
}
#[test]
fn doc_twice() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = path2url(p.root()))));
+ dir = path2url(p.root())
+ )),
+ );
- assert_that(p.cargo("doc"),
- execs().with_status(0).with_stdout(""))
+ assert_that(p.cargo("doc"), execs().with_status(0).with_stdout(""))
}
#[test]
fn doc_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[..] bar v0.0.1 ({dir}/bar)
[..] bar v0.0.1 ({dir}/bar)
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = path2url(p.root()))));
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
assert_that(&p.root().join("target/doc/bar/index.html"), existing_file());
- assert_that(p.cargo("doc")
- .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"),
- execs().with_status(0).with_stdout(""));
+ assert_that(
+ p.cargo("doc")
+ .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"),
+ execs().with_status(0).with_stdout(""),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
#[test]
fn doc_no_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--no-deps"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc").arg("--no-deps"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = path2url(p.root()))));
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
- assert_that(&p.root().join("target/doc/bar/index.html"), is_not(existing_file()));
+ assert_that(
+ &p.root().join("target/doc/bar/index.html"),
+ is_not(existing_file()),
+ );
}
#[test]
fn doc_only_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("doc").arg("-v"), execs().with_status(0));
assert_that(&p.root().join("target/doc"), existing_dir());
assert_that(&p.root().join("target/doc/bar/index.html"), existing_file());
#[test]
fn doc_multiple_targets_same_name_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[lib]
name = "foo_lib"
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[lib]
name = "foo_lib"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("doc").arg("--all"),
- execs()
- .with_status(101)
- .with_stderr_contains("[..] library `foo_lib` is specified [..]")
- .with_stderr_contains("[..] `foo v0.1.0[..]` [..]")
- .with_stderr_contains("[..] `bar v0.1.0[..]` [..]"));
+ assert_that(
+ p.cargo("doc").arg("--all"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..] library `foo_lib` is specified [..]")
+ .with_stderr_contains("[..] `foo v0.1.0[..]` [..]")
+ .with_stderr_contains("[..] `bar v0.1.0[..]` [..]"),
+ );
}
#[test]
fn doc_multiple_targets_same_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[bin]]
name = "foo_lib"
path = "src/foo_lib.rs"
- "#)
+ "#,
+ )
.file("foo/src/foo_lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[lib]
name = "foo_lib"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- let root = path2url(p.root());
-
- assert_that(p.cargo("doc").arg("--all"),
- execs()
- .with_status(0)
- .with_stderr_contains(&format!("[DOCUMENTING] foo v0.1.0 ({}/foo)", root))
- .with_stderr_contains(&format!("[DOCUMENTING] bar v0.1.0 ({}/bar)", root))
- .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"));
- assert_that(&p.root().join("target/doc"), existing_dir());
- let doc_file = p.root().join("target/doc/foo_lib/index.html");
- assert_that(&doc_file, existing_file());
+ let root = path2url(p.root());
+
+ assert_that(
+ p.cargo("doc").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(&format!("[DOCUMENTING] foo v0.1.0 ({}/foo)", root))
+ .with_stderr_contains(&format!("[DOCUMENTING] bar v0.1.0 ({}/bar)", root))
+ .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"),
+ );
+ assert_that(&p.root().join("target/doc"), existing_dir());
+ let doc_file = p.root().join("target/doc/foo_lib/index.html");
+ assert_that(&doc_file, existing_file());
}
#[test]
fn doc_multiple_targets_same_name_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[bin]]
name = "foo-cli"
- "#)
+ "#,
+ )
.file("foo/src/foo-cli.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[[bin]]
name = "foo-cli"
- "#)
+ "#,
+ )
.file("bar/src/foo-cli.rs", "")
.build();
- assert_that(p.cargo("doc").arg("--all"),
- execs()
- .with_status(101)
- .with_stderr_contains("[..] binary `foo_cli` is specified [..]")
- .with_stderr_contains("[..] `foo v0.1.0[..]` [..]")
- .with_stderr_contains("[..] `bar v0.1.0[..]` [..]"));
+ assert_that(
+ p.cargo("doc").arg("--all"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..] binary `foo_cli` is specified [..]")
+ .with_stderr_contains("[..] `foo v0.1.0[..]` [..]")
+ .with_stderr_contains("[..] `bar v0.1.0[..]` [..]"),
+ );
}
#[test]
fn doc_multiple_targets_same_name_undoced() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[bin]]
name = "foo-cli"
- "#)
+ "#,
+ )
.file("foo/src/foo-cli.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[[bin]]
name = "foo-cli"
doc = false
- "#)
+ "#,
+ )
.file("bar/src/foo-cli.rs", "")
.build();
- assert_that(p.cargo("doc").arg("--all"),
- execs().with_status(0));
+ assert_that(p.cargo("doc").arg("--all"), execs().with_status(0));
}
#[test]
fn doc_lib_bin_same_name_documents_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
//! Binary documentation
extern crate foo;
fn main() {
foo::foo();
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! Library documentation
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert_that(&doc_file, existing_file());
let mut doc_html = String::new();
- File::open(&doc_file).unwrap().read_to_string(&mut doc_html).unwrap();
+ File::open(&doc_file)
+ .unwrap()
+ .read_to_string(&mut doc_html)
+ .unwrap();
assert!(doc_html.contains("Library"));
assert!(!doc_html.contains("Binary"));
}
#[test]
fn doc_lib_bin_same_name_documents_lib_when_requested() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
//! Binary documentation
extern crate foo;
fn main() {
foo::foo();
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! Library documentation
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--lib"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc").arg("--lib"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert_that(&doc_file, existing_file());
let mut doc_html = String::new();
- File::open(&doc_file).unwrap().read_to_string(&mut doc_html).unwrap();
+ File::open(&doc_file)
+ .unwrap()
+ .read_to_string(&mut doc_html)
+ .unwrap();
assert!(doc_html.contains("Library"));
assert!(!doc_html.contains("Binary"));
}
#[test]
fn doc_lib_bin_same_name_documents_named_bin_when_requested() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
//! Binary documentation
extern crate foo;
fn main() {
foo::foo();
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! Library documentation
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--bin").arg("foo"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc").arg("--bin").arg("foo"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert_that(&doc_file, existing_file());
let mut doc_html = String::new();
- File::open(&doc_file).unwrap().read_to_string(&mut doc_html).unwrap();
+ File::open(&doc_file)
+ .unwrap()
+ .read_to_string(&mut doc_html)
+ .unwrap();
assert!(!doc_html.contains("Library"));
assert!(doc_html.contains("Binary"));
}
#[test]
fn doc_lib_bin_same_name_documents_bins_when_requested() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
//! Binary documentation
extern crate foo;
fn main() {
foo::foo();
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! Library documentation
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--bins"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("doc").arg("--bins"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[DOCUMENTING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert_that(&doc_file, existing_file());
let mut doc_html = String::new();
- File::open(&doc_file).unwrap().read_to_string(&mut doc_html).unwrap();
+ File::open(&doc_file)
+ .unwrap()
+ .read_to_string(&mut doc_html)
+ .unwrap();
assert!(!doc_html.contains("Library"));
assert!(doc_html.contains("Binary"));
}
#[test]
fn doc_dash_p() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "extern crate a;")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies.b]
path = "../b"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "extern crate b;")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("doc").arg("-p").arg("a"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("doc").arg("-p").arg("a"),
+ execs().with_status(0).with_stderr(
+ "\
[..] b v0.0.1 (file://[..])
[..] b v0.0.1 (file://[..])
[DOCUMENTING] a v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn doc_same_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/main.rs", "fn main() {}")
.file("examples/main.rs", "fn main() {}")
.file("tests/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
}
#[test]
const TARGET: &'static str = "arm-unknown-linux-gnueabihf";
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(no_core)]
#![no_core]
extern {
pub static A: u32;
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--target").arg(TARGET).arg("--verbose"),
- execs().with_status(0));
- assert_that(&p.root().join(&format!("target/{}/doc", TARGET)), existing_dir());
- assert_that(&p.root().join(&format!("target/{}/doc/foo/index.html", TARGET)), existing_file());
+ assert_that(
+ p.cargo("doc").arg("--target").arg(TARGET).arg("--verbose"),
+ execs().with_status(0),
+ );
+ assert_that(
+ &p.root().join(&format!("target/{}/doc", TARGET)),
+ existing_dir(),
+ );
+ assert_that(
+ &p.root()
+ .join(&format!("target/{}/doc/foo/index.html", TARGET)),
+ existing_file(),
+ );
}
#[test]
fn target_specific_not_documented() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[target.foo.dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "not rust")
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
}
#[test]
fn output_not_captured() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
- .file("a/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ "
/// ```
/// ☃
/// ```
pub fn foo() {}
- ")
+ ",
+ )
.build();
let error = p.cargo("doc").exec_with_output().err().unwrap();
let stderr = str::from_utf8(&output.stderr).unwrap();
assert!(stderr.contains("☃"), "no snowman\n{}", stderr);
- assert!(stderr.contains("unknown start of token"), "no message{}", stderr);
+ assert!(
+ stderr.contains("unknown start of token"),
+ "no message{}",
+ stderr
+ );
} else {
- assert!(false, "an error kind other than ProcessErrorKind was encountered");
+ assert!(
+ false,
+ "an error kind other than ProcessErrorKind was encountered"
+ );
}
}
#[test]
fn target_specific_documented() {
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
a = {{ path = "a" }}
[target.{}.dependencies]
a = {{ path = "a" }}
- "#, rustc_host()))
- .file("src/lib.rs", "
+ "#,
+ rustc_host()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate a;
/// test
pub fn foo() {}
- ")
- .file("a/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
- .file("a/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ "
/// test
pub fn foo() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
}
#[test]
fn no_document_build_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies]
a = { path = "a" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("a/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
- .file("a/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ "
/// ```
/// ☃
/// ```
pub fn foo() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
}
#[test]
fn doc_release() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0));
- assert_that(p.cargo("doc").arg("--release").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(p.cargo("build").arg("--release"), execs().with_status(0));
+ assert_that(
+ p.cargo("doc").arg("--release").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[DOCUMENTING] foo v0.0.1 ([..])
[RUNNING] `rustdoc [..] src[/]lib.rs [..]`
[FINISHED] release [optimized] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn doc_multiple_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "baz"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
- .file("baz/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc")
- .arg("-p").arg("bar")
- .arg("-p").arg("baz")
- .arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("doc")
+ .arg("-p")
+ .arg("bar")
+ .arg("-p")
+ .arg("baz")
+ .arg("-v"),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
assert_that(&p.root().join("target/doc/bar/index.html"), existing_file());
#[test]
fn features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[features]
foo = ["bar/bar"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(feature = "foo")]
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[features]
bar = []
- "#)
- .file("bar/build.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
fn main() {
println!("cargo:rustc-cfg=bar");
}
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(feature = "bar")]
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--features").arg("foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("doc").arg("--features").arg("foo"),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("target/doc"), existing_dir());
- assert_that(&p.root().join("target/doc/foo/fn.foo.html"), existing_file());
- assert_that(&p.root().join("target/doc/bar/fn.bar.html"), existing_file());
+ assert_that(
+ &p.root().join("target/doc/foo/fn.foo.html"),
+ existing_file(),
+ );
+ assert_that(
+ &p.root().join("target/doc/bar/fn.bar.html"),
+ existing_file(),
+ );
}
#[test]
fn rerun_when_dir_removed() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// dox
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap();
- assert_that(p.cargo("doc"),
- execs().with_status(0));
+ assert_that(p.cargo("doc"), execs().with_status(0));
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
}
#[test]
fn document_only_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// dox
pub fn foo() {}
- "#)
- .file("src/bin/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/bar.rs",
+ r#"
/// ```
/// ☃
/// ```
pub fn foo() {}
fn main() { foo(); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("--lib"),
- execs().with_status(0));
+ assert_that(p.cargo("doc").arg("--lib"), execs().with_status(0));
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
}
#[test]
fn plugins_no_use_target() {
if !cargotest::is_nightly() {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
proc-macro = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("doc")
- .arg("--target=x86_64-unknown-openbsd")
- .arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("doc")
+ .arg("--target=x86_64-unknown-openbsd")
+ .arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn doc_all_workspace() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
// The order in which bar is compiled or documented is not deterministic
- assert_that(p.cargo("doc")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"));
+ assert_that(
+ p.cargo("doc").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"),
+ );
}
#[test]
fn doc_all_virtual_manifest() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
// The order in which foo and bar are documented is not guaranteed
- assert_that(p.cargo("doc")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"));
+ assert_that(
+ p.cargo("doc").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"),
+ );
}
#[test]
fn doc_virtual_manifest_all_implied() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
// The order in which foo and bar are documented is not guaranteed
- assert_that(p.cargo("doc"),
- execs().with_status(0)
- .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"));
+ assert_that(
+ p.cargo("doc"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])"),
+ );
}
#[test]
fn doc_all_member_dependency_same_name() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
[dependencies]
a = "0.1.0"
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
pub fn a() {}
- "#)
+ "#,
+ )
.build();
Package::new("a", "0.1.0").publish();
- assert_that(p.cargo("doc")
- .arg("--all"),
- execs().with_status(0)
- .with_stderr_contains("[..] Updating registry `[..]`")
- .with_stderr_contains("[..] Documenting a v0.1.0 ([..])"));
+ assert_that(
+ p.cargo("doc").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[..] Updating registry `[..]`")
+ .with_stderr_contains("[..] Documenting a v0.1.0 ([..])"),
+ );
}
#[test]
fn doc_workspace_open_help_message() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo", "bar"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
// The order in which bar is compiled or documented is not deterministic
- assert_that(p.cargo("doc")
- .arg("--all")
- .arg("--open"),
- execs().with_status(101)
- .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
- .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
- .with_stderr_contains("error: Passing multiple packages and `open` is not supported.")
- .with_stderr_contains("Please re-run this command with `-p <spec>` where `<spec>` is one of the following:")
- .with_stderr_contains(" foo")
- .with_stderr_contains(" bar"));
+ assert_that(
+ p.cargo("doc").arg("--all").arg("--open"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+ .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+ .with_stderr_contains(
+ "error: Passing multiple packages and `open` \
+ is not supported.",
+ )
+ .with_stderr_contains(
+ "Please re-run this command with `-p <spec>` \
+ where `<spec>` is one of the following:",
+ )
+ .with_stderr_contains(" foo")
+ .with_stderr_contains(" bar"),
+ );
}
use std::io::prelude::*;
use cargotest::support::paths::CargoPathExt;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn invalid1() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[features]
bar = ["baz"]
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Feature `bar` includes `baz` which is neither a dependency nor another feature
-"));
+",
+ ),
+ );
}
#[test]
fn invalid2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Features and dependencies cannot have the same name: `bar`
-"));
+",
+ ),
+ );
}
#[test]
fn invalid3() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Feature `bar` depends on `baz` which is not an optional dependency.
Consider adding `optional = true` to the dependency
-"));
+",
+ ),
+ );
}
#[test]
fn invalid4() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
features = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to select a version for `bar`.
... required by package `foo v0.0.1 ([..])`
versions that meet the requirements `*` are: 0.0.1
the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features.
-failed to select a version for `bar` which could resolve this conflict"));
+failed to select a version for `bar` which could resolve this conflict",
+ ),
+ );
- p.change_file("Cargo.toml", r#"
+ p.change_file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#);
-
- assert_that(p.cargo("build").arg("--features").arg("test"),
- execs().with_status(101).with_stderr("\
-error: Package `foo v0.0.1 ([..])` does not have these features: `test`"));
+ "#,
+ );
+
+ assert_that(
+ p.cargo("build").arg("--features").arg("test"),
+ execs().with_status(101).with_stderr(
+ "\
+ error: Package `foo v0.0.1 ([..])` does not have these features: `test`",
+ ),
+ );
}
#[test]
fn invalid5() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dev-dependencies.bar]
path = "bar"
optional = true
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Dev-dependencies are not allowed to be optional: `bar`
-"));
+",
+ ),
+ );
}
#[test]
fn invalid6() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[features]
foo = ["bar/baz"]
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build").arg("--features").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Feature `foo` requires a feature of `bar` which is not a dependency
-"));
+",
+ ),
+ );
}
#[test]
fn invalid7() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[features]
foo = ["bar/baz"]
bar = []
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build").arg("--features").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Feature `foo` requires a feature of `bar` which is not a dependency
-"));
+",
+ ),
+ );
}
#[test]
fn invalid8() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
features = ["foo/bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("--features").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] feature names may not contain slashes: `foo/bar`
-"));
+",
+ ),
+ );
}
#[test]
fn invalid9() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
#[test]
fn invalid10() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
features = ["baz"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.baz]
path = "baz"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
- .file("bar/baz/Cargo.toml", r#"
+ .file(
+ "bar/baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/baz/src/lib.rs", "")
.build();
#[test]
fn no_transitive_dep_feature_requirement() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[features]
default = ["derived/bar/qux"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate derived;
fn main() { derived::test(); }
- "#)
- .file("derived/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "derived/Cargo.toml",
+ r#"
[package]
name = "derived"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("derived/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "derived/src/lib.rs",
+ r#"
extern crate bar;
pub use bar::test;
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[features]
qux = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(feature = "qux")]
pub fn test() { print!("test"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] feature names may not contain slashes: `bar/qux`
-"));
+",
+ ),
+ );
}
#[test]
fn no_feature_doesnt_build() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[cfg(feature = "bar")]
extern crate bar;
#[cfg(feature = "bar")]
fn main() { bar::bar(); println!("bar") }
#[cfg(not(feature = "bar"))]
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
- assert_that(p.process(&p.bin("foo")),
- execs().with_status(0).with_stdout(""));
-
- assert_that(p.cargo("build").arg("--features").arg("bar"),
- execs().with_status(0).with_stderr(format!("\
+",
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.process(&p.bin("foo")),
+ execs().with_status(0).with_stdout(""),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--features").arg("bar"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
- assert_that(p.process(&p.bin("foo")),
- execs().with_status(0).with_stdout("bar\n"));
+",
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("bar\n"),
+ );
}
#[test]
fn default_feature_pulled_in() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[cfg(feature = "bar")]
extern crate bar;
#[cfg(feature = "bar")]
fn main() { bar::bar(); println!("bar") }
#[cfg(not(feature = "bar"))]
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
- assert_that(p.process(&p.bin("foo")),
- execs().with_status(0).with_stdout("bar\n"));
-
- assert_that(p.cargo("build").arg("--no-default-features"),
- execs().with_status(0).with_stderr(format!("\
+",
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("bar\n"),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--no-default-features"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
- assert_that(p.process(&p.bin("foo")),
- execs().with_status(0).with_stdout(""));
+",
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.process(&p.bin("foo")),
+ execs().with_status(0).with_stdout(""),
+ );
}
#[test]
fn cyclic_feature() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[features]
default = ["default"]
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Cyclic feature dependency: feature `default` depends on itself
-"));
+",
+ ),
+ );
}
#[test]
fn cyclic_feature2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[features]
foo = ["bar"]
bar = ["foo"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn groups_on_groups_on_groups() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "baz"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate bar;
#[allow(unused_extern_crates)]
extern crate baz;
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn many_cli_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "baz"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate bar;
#[allow(unused_extern_crates)]
extern crate baz;
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
- assert_that(p.cargo("build").arg("--features").arg("bar baz"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("bar baz"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn union_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.d2]
path = "d2"
features = ["f2"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate d1;
extern crate d2;
d2::f1();
d2::f2();
}
- "#)
- .file("d1/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
path = "../d2"
features = ["f1"]
optional = true
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
[features]
f1 = []
f2 = []
- "#)
- .file("d2/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "d2/src/lib.rs",
+ r#"
#[cfg(feature = "f1")] pub fn f1() {}
#[cfg(feature = "f2")] pub fn f2() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] d2 v0.0.1 ({dir}/d2)
[COMPILING] d1 v0.0.1 ({dir}/d1)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn many_features_no_rebuilds() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.1.0"
[dependencies.a]
path = "a"
features = ["fall"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.1.0"
ftest = []
ftest2 = []
fall = ["ftest", "ftest2"]
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] a v0.1.0 ({dir}/a)
[COMPILING] b v0.1.0 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
p.root().move_into_the_past();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] a v0.1.0 ([..]/a)
[FRESH] b v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
// Tests that all cmd lines work with `--features ""`
#[test]
fn empty_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--features").arg(""),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--features").arg(""),
+ execs().with_status(0),
+ );
}
// Tests that all cmd lines work with `--features ""`
#[test]
fn transitive_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate bar;
fn main() { bar::baz(); }
- ")
- .file("bar/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[features]
baz = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(feature = "baz")]
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--features").arg("foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--features").arg("foo"),
+ execs().with_status(0),
+ );
}
#[test]
fn everything_in_the_lockfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.d3]
path = "d3"
optional = true
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[features]
f1 = []
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.2"
authors = []
- "#)
+ "#,
+ )
.file("d2/src/lib.rs", "")
- .file("d3/Cargo.toml", r#"
+ .file(
+ "d3/Cargo.toml",
+ r#"
[package]
name = "d3"
version = "0.0.3"
[features]
f3 = []
- "#)
+ "#,
+ )
.file("d3/src/lib.rs", "")
.build();
let loc = p.root().join("Cargo.lock");
let mut lockfile = String::new();
t!(t!(File::open(&loc)).read_to_string(&mut lockfile));
- assert!(lockfile.contains(r#"name = "d1""#), "d1 not found\n{}", lockfile);
- assert!(lockfile.contains(r#"name = "d2""#), "d2 not found\n{}", lockfile);
- assert!(lockfile.contains(r#"name = "d3""#), "d3 not found\n{}", lockfile);
+ assert!(
+ lockfile.contains(r#"name = "d1""#),
+ "d1 not found\n{}",
+ lockfile
+ );
+ assert!(
+ lockfile.contains(r#"name = "d2""#),
+ "d2 not found\n{}",
+ lockfile
+ );
+ assert!(
+ lockfile.contains(r#"name = "d3""#),
+ "d3 not found\n{}",
+ lockfile
+ );
}
#[test]
fn no_rebuild_when_frobbing_default_feature() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
a = { path = "a" }
b = { path = "b" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.1.0"
[dependencies]
a = { path = "../a", features = ["f1"], default-features = false }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.1.0"
[features]
default = ["f1"]
f1 = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
#[test]
fn unions_work_with_no_default_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
a = { path = "a" }
b = { path = "b" }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate a;
pub fn foo() { a::a(); }
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.1.0"
[dependencies]
a = { path = "../a", features = [], default-features = false }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.1.0"
[features]
default = ["f1"]
f1 = []
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
#[cfg(feature = "f1")]
pub fn a() {}
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
#[test]
fn optional_and_dev_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.1.0"
foo = { path = "foo", optional = true }
[dev-dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] test v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn activating_feature_activates_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.1.0"
[features]
a = ["foo/a"]
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::bar();
}
- ")
- .file("foo/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[features]
a = []
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
#[cfg(feature = "a")]
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--features").arg("a").arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--features").arg("a").arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn dep_feature_in_cmd_line() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.derived]
path = "derived"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate derived;
fn main() { derived::test(); }
- "#)
- .file("derived/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "derived/Cargo.toml",
+ r#"
[package]
name = "derived"
version = "0.0.1"
[features]
default = []
derived-feat = ["bar/some-feat"]
- "#)
- .file("derived/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "derived/src/lib.rs",
+ r#"
extern crate bar;
pub use bar::test;
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[features]
some-feat = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(feature = "some-feat")]
pub fn test() { print!("test"); }
- "#)
+ "#,
+ )
.build();
// The foo project requires that feature "some-feat" in "bar" is enabled.
// Building without any features enabled should fail:
- assert_that(p.cargo("build"),
- execs().with_status(101));
+ assert_that(p.cargo("build"), execs().with_status(101));
// We should be able to enable the feature "derived-feat", which enables "some-feat",
// on the command line. The feature is enabled, thus building should be successful:
- assert_that(p.cargo("build").arg("--features").arg("derived/derived-feat"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .arg("--features")
+ .arg("derived/derived-feat"),
+ execs().with_status(0),
+ );
// Trying to enable features of transitive dependencies is an error
- assert_that(p.cargo("build").arg("--features").arg("bar/some-feat"),
- execs().with_status(101).with_stderr("\
-error: Package `foo v0.0.1 ([..])` does not have these features: `bar`"));
+ assert_that(
+ p.cargo("build").arg("--features").arg("bar/some-feat"),
+ execs().with_status(101).with_stderr(
+ "\
+ error: Package `foo v0.0.1 ([..])` does not have these features: `bar`",
+ ),
+ );
// Hierarchical feature specification should still be disallowed
- assert_that(p.cargo("build").arg("--features").arg("derived/bar/some-feat"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("--features")
+ .arg("derived/bar/some-feat"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] feature names may not contain slashes: `bar/some-feat`
-"));
+",
+ ),
+ );
}
#[test]
fn all_features_flag_enables_all_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "baz"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[cfg(feature = "foo")]
pub fn foo() {}
foo();
bar();
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
- assert_that(p.cargo("build").arg("--all-features"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--all-features"),
+ execs().with_status(0),
+ );
}
#[test]
fn many_cli_features_comma_delimited() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "baz"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate bar;
#[allow(unused_extern_crates)]
extern crate baz;
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
- assert_that(p.cargo("build").arg("--features").arg("bar,baz"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("bar,baz"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn many_cli_features_comma_and_space_delimited() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bap]
path = "bap"
optional = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate bar;
#[allow(unused_extern_crates)]
#[allow(unused_extern_crates)]
extern crate bap;
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "pub fn baz() {}")
- .file("bam/Cargo.toml", r#"
+ .file(
+ "bam/Cargo.toml",
+ r#"
[package]
name = "bam"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bam/src/lib.rs", "pub fn bam() {}")
- .file("bap/Cargo.toml", r#"
+ .file(
+ "bap/Cargo.toml",
+ r#"
[package]
name = "bap"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bap/src/lib.rs", "pub fn bap() {}")
.build();
- assert_that(p.cargo("build").arg("--features").arg("bar,baz bam bap"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("bar,baz bam bap"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] ba[..] v0.0.1 ({dir}/ba[..])
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn no_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
mod a; fn main() {}
- "#)
+ "#,
+ )
.file("src/a.rs", "")
.build();
- assert_that(p.cargo("fetch"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("fetch"), execs().with_status(0).with_stdout(""));
}
use std::io::prelude::*;
use cargotest::sleep_ms;
-use cargotest::support::{project, execs, path2url};
+use cargotest::support::{execs, project, path2url};
use cargotest::support::paths::CargoPathExt;
use hamcrest::{assert_that, existing_file};
#[test]
fn modifying_and_moving() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
mod a; fn main() {}
- "#)
+ "#,
+ )
.file("src/a.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
p.root().move_into_the_past();
p.root().join("target").move_into_the_past();
- File::create(&p.root().join("src/a.rs")).unwrap()
- .write_all(b"#[allow(unused)]fn main() {}").unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ File::create(&p.root().join("src/a.rs"))
+ .unwrap()
+ .write_all(b"#[allow(unused)]fn main() {}")
+ .unwrap();
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(101));
+ assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
fn modify_only_some_files() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "mod a;")
.file("src/a.rs", "")
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
mod b;
fn main() {}
- "#)
+ "#,
+ )
.file("src/b.rs", "")
.file("tests/test.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
- assert_that(p.cargo("test"),
- execs().with_status(0));
+",
+ dir = path2url(p.root())
+ )),
+ );
+ assert_that(p.cargo("test"), execs().with_status(0));
sleep_ms(1000);
assert_that(&p.bin("foo"), existing_file());
let lib = p.root().join("src/lib.rs");
let bin = p.root().join("src/b.rs");
- File::create(&lib).unwrap().write_all(b"invalid rust code").unwrap();
- File::create(&bin).unwrap().write_all(b"#[allow(unused)]fn foo() {}").unwrap();
+ File::create(&lib)
+ .unwrap()
+ .write_all(b"invalid rust code")
+ .unwrap();
+ File::create(&bin)
+ .unwrap()
+ .write_all(b"#[allow(unused)]fn foo() {}")
+ .unwrap();
lib.move_into_the_past();
// Make sure the binary is rebuilt, not the lib
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = path2url(p.root()))));
+",
+ dir = path2url(p.root())
+ )),
+ );
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn rebuild_sub_package_then_while_package() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
path = "a"
[dependencies.b]
path = "b"
- "#)
+ "#,
+ )
.file("src/lib.rs", "extern crate a; extern crate b;")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
authors = []
version = "0.0.1"
[dependencies.b]
path = "../b"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "extern crate b;")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
- File::create(&p.root().join("b/src/lib.rs")).unwrap().write_all(br#"
+ File::create(&p.root().join("b/src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn b() {}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
- assert_that(p.cargo("build").arg("-pb"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-pb"), execs().with_status(0));
- File::create(&p.root().join("src/lib.rs")).unwrap().write_all(br#"
+ File::create(&p.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
extern crate a;
extern crate b;
pub fn toplevel() {}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn changing_lib_features_caches_targets() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[features]
foo = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build").arg("--features").arg("foo"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--features").arg("foo"),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
/* Targets should be cached from the first build */
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
- assert_that(p.cargo("build").arg("--features").arg("foo"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--features").arg("foo"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn changing_profiles_caches_targets() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[profile.test]
panic = "unwind"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("test"),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE]
[DOCTEST] foo
-"));
+",
+ ),
+ );
/* Targets should be cached from the first build */
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("test").arg("foo"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("test").arg("foo"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE]
[DOCTEST] foo
-"));
+",
+ ),
+ );
}
#[test]
fn changing_bin_paths_common_target_features_caches_targets() {
// Make sure dep_cache crate is built once per feature
let p = project("foo")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
target-dir = "./target"
- "#)
- .file("dep_crate/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "dep_crate/Cargo.toml",
+ r#"
[package]
name = "dep_crate"
version = "0.0.1"
[features]
ftest = []
- "#)
- .file("dep_crate/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "dep_crate/src/lib.rs",
+ r#"
#[cfg(feature = "ftest")]
pub fn yo() {
println!("ftest on")
pub fn yo() {
println!("ftest off")
}
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies]
dep_crate = {path = "../dep_crate", features = []}
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("a/src/main.rs", r#"
+ .file(
+ "a/src/main.rs",
+ r#"
extern crate dep_crate;
use dep_crate::yo;
fn main() {
yo();
}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
[dependencies]
dep_crate = {path = "../dep_crate", features = ["ftest"]}
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("b/src/main.rs", r#"
+ .file(
+ "b/src/main.rs",
+ r#"
extern crate dep_crate;
use dep_crate::yo;
fn main() {
yo();
}
- "#)
+ "#,
+ )
.build();
/* Build and rebuild a/. Ensure dep_crate only builds once */
- assert_that(p.cargo("run").cwd(p.root().join("a")),
- execs().with_status(0)
- .with_stdout("ftest off")
- .with_stderr("\
+ assert_that(
+ p.cargo("run").cwd(p.root().join("a")),
+ execs().with_status(0).with_stdout("ftest off").with_stderr(
+ "\
[..]Compiling dep_crate v0.0.1 ([..])
[..]Compiling a v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]a[EXE]`
-"));
- assert_that(p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")),
- execs().with_status(0));
- assert_that(p.cargo("run").cwd(p.root().join("a")),
- execs().with_status(0)
- .with_stdout("ftest off")
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("run").cwd(p.root().join("a")),
+ execs().with_status(0).with_stdout("ftest off").with_stderr(
+ "\
[..]Compiling a v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]a[EXE]`
-"));
+",
+ ),
+ );
/* Build and rebuild b/. Ensure dep_crate only builds once */
- assert_that(p.cargo("run").cwd(p.root().join("b")),
- execs().with_status(0)
- .with_stdout("ftest on")
- .with_stderr("\
+ assert_that(
+ p.cargo("run").cwd(p.root().join("b")),
+ execs().with_status(0).with_stdout("ftest on").with_stderr(
+ "\
[..]Compiling dep_crate v0.0.1 ([..])
[..]Compiling b v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]b[EXE]`
-"));
- assert_that(p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")),
- execs().with_status(0));
- assert_that(p.cargo("run").cwd(p.root().join("b")),
- execs().with_status(0)
- .with_stdout("ftest on")
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("run").cwd(p.root().join("b")),
+ execs().with_status(0).with_stdout("ftest on").with_stderr(
+ "\
[..]Compiling b v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]b[EXE]`
-"));
+",
+ ),
+ );
/* Build a/ package again. If we cache different feature dep builds correctly,
* this should not cause a rebuild of dep_crate */
- assert_that(p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")),
- execs().with_status(0));
- assert_that(p.cargo("run").cwd(p.root().join("a")),
- execs().with_status(0)
- .with_stdout("ftest off")
- .with_stderr("\
+ assert_that(
+ p.cargo("clean").arg("-p").arg("a").cwd(p.root().join("a")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("run").cwd(p.root().join("a")),
+ execs().with_status(0).with_stdout("ftest off").with_stderr(
+ "\
[..]Compiling a v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]a[EXE]`
-"));
+",
+ ),
+ );
/* Build b/ package again. If we cache different feature dep builds correctly,
* this should not cause a rebuild */
- assert_that(p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")),
- execs().with_status(0));
- assert_that(p.cargo("run").cwd(p.root().join("b")),
- execs().with_status(0)
- .with_stdout("ftest on")
- .with_stderr("\
+ assert_that(
+ p.cargo("clean").arg("-p").arg("b").cwd(p.root().join("b")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("run").cwd(p.root().join("b")),
+ execs().with_status(0).with_stdout("ftest on").with_stderr(
+ "\
[..]Compiling b v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]b[EXE]`
-"));
+",
+ ),
+ );
}
#[test]
fn changing_bin_features_caches_targets() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[features]
foo = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" };
println!("{}", msg);
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stdout("feature off")
- .with_stderr("\
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stdout("feature off")
+ .with_stderr(
+ "\
[..]Compiling foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
-"));
-
- assert_that(p.cargo("run").arg("--features").arg("foo"),
- execs().with_status(0)
- .with_stdout("feature on")
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("run").arg("--features").arg("foo"),
+ execs()
+ .with_status(0)
+ .with_stdout("feature on")
+ .with_stderr(
+ "\
[..]Compiling foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
-"));
+",
+ ),
+ );
/* Targets should be cached from the first build */
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stdout("feature off")
- .with_stderr("\
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stdout("feature off")
+ .with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
-"));
-
- assert_that(p.cargo("run").arg("--features").arg("foo"),
- execs().with_status(0)
- .with_stdout("feature on")
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("run").arg("--features").arg("foo"),
+ execs()
+ .with_status(0)
+ .with_stdout("feature on")
+ .with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
-"));
+",
+ ),
+ );
}
#[test]
fn rebuild_tests_if_lib_changes() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
- .file("tests/foo.rs", r#"
+ .file(
+ "tests/foo.rs",
+ r#"
extern crate foo;
#[test]
fn test() { foo::foo(); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
sleep_ms(1000);
File::create(&p.root().join("src/lib.rs")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(101));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(101));
}
#[test]
fn no_rebuild_transitive_target_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
a = { path = "a" }
[dev-dependencies]
b = { path = "b" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("tests/foo.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[target.foo.dependencies]
c = { path = "../c" }
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
[dependencies]
c = { path = "../c" }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("c/Cargo.toml", r#"
+ .file(
+ "c/Cargo.toml",
+ r#"
[package]
name = "c"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("c/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("test").arg("--no-run"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("test").arg("--no-run"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] c v0.0.1 ([..])
[COMPILING] b v0.0.1 ([..])
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn rerun_if_changed_in_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("a/build.rs", r#"
+ "#,
+ )
+ .file(
+ "a/build.rs",
+ r#"
fn main() {
println!("cargo:rerun-if-changed=build.rs");
}
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn same_build_dir_cached_packages() {
let p = project("foo")
- .file("a1/Cargo.toml", r#"
+ .file(
+ "a1/Cargo.toml",
+ r#"
[package]
name = "a1"
version = "0.0.1"
authors = []
[dependencies]
b = { path = "../b" }
- "#)
+ "#,
+ )
.file("a1/src/lib.rs", "")
- .file("a2/Cargo.toml", r#"
+ .file(
+ "a2/Cargo.toml",
+ r#"
[package]
name = "a2"
version = "0.0.1"
authors = []
[dependencies]
b = { path = "../b" }
- "#)
+ "#,
+ )
.file("a2/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
[dependencies]
c = { path = "../c" }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("c/Cargo.toml", r#"
+ .file(
+ "c/Cargo.toml",
+ r#"
[package]
name = "c"
version = "0.0.1"
authors = []
[dependencies]
d = { path = "../d" }
- "#)
+ "#,
+ )
.file("c/src/lib.rs", "")
- .file("d/Cargo.toml", r#"
+ .file(
+ "d/Cargo.toml",
+ r#"
[package]
name = "d"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("d/src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
target-dir = "./target"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").cwd(p.root().join("a1")),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("a1")),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] d v0.0.1 ({dir}/d)
[COMPILING] c v0.0.1 ({dir}/c)
[COMPILING] b v0.0.1 ({dir}/b)
[COMPILING] a1 v0.0.1 ({dir}/a1)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
- assert_that(p.cargo("build").cwd(p.root().join("a2")),
- execs().with_status(0).with_stderr(&format!("\
+",
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("a2")),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] a2 v0.0.1 ({dir}/a2)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn no_rebuild_if_build_artifacts_move_backwards_in_time() {
let p = project("backwards_in_time")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "backwards_in_time"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
p.root().move_into_the_past();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout("").with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stdout("").with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn rebuild_if_build_artifacts_move_forward_in_time() {
let p = project("forwards_in_time")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "forwards_in_time"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
p.root().move_into_the_future();
- assert_that(p.cargo("build").env("RUST_LOG", ""),
- execs().with_status(0).with_stdout("").with_stderr("\
+ assert_that(
+ p.cargo("build").env("RUST_LOG", ""),
+ execs().with_status(0).with_stdout("").with_stderr(
+ "\
[COMPILING] a v0.0.1 ([..])
[COMPILING] forwards_in_time v0.0.1 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn rebuild_if_environment_changes() {
let p = project("env_change")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "env_change"
description = "old desc"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
println!("{}", env!("CARGO_PKG_DESCRIPTION"));
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stdout("old desc").with_stderr(&format!("\
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stdout("old desc")
+ .with_stderr(&format!(
+ "\
[COMPILING] env_change v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]env_change[EXE]`
-", dir = p.url())));
-
- File::create(&p.root().join("Cargo.toml")).unwrap().write_all(br#"
+",
+ dir = p.url()
+ )),
+ );
+
+ File::create(&p.root().join("Cargo.toml"))
+ .unwrap()
+ .write_all(
+ br#"
[package]
name = "env_change"
description = "new desc"
version = "0.0.1"
authors = []
- "#).unwrap();
-
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stdout("new desc").with_stderr(&format!("\
+ "#,
+ )
+ .unwrap();
+
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stdout("new desc")
+ .with_stderr(&format!(
+ "\
[COMPILING] env_change v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]env_change[EXE]`
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn no_rebuild_when_rename_dir() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.build();
new.push("bar");
fs::rename(p.root(), &new).unwrap();
- assert_that(p.cargo("build").cwd(&new),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(&new),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
use std::fs::{self, File};
use std::io::prelude::*;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use cargotest::support::registry::Package;
use cargotest::ChannelChanger;
use hamcrest::{assert_that, existing_file, is_not};
#[test]
fn adding_and_removing_packages() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let toml = p.root().join("Cargo.toml");
let lock1 = p.read_lockfile();
// add a dep
- File::create(&toml).unwrap().write_all(br#"
+ File::create(&toml)
+ .unwrap()
+ .write_all(
+ br#"
[package]
name = "foo"
authors = []
[dependencies.bar]
path = "bar"
- "#).unwrap();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ "#,
+ )
+ .unwrap();
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let lock2 = p.read_lockfile();
assert_ne!(lock1, lock2);
// change the dep
- File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#"
+ File::create(&p.root().join("bar/Cargo.toml"))
+ .unwrap()
+ .write_all(
+ br#"
[package]
name = "bar"
authors = []
version = "0.0.2"
- "#).unwrap();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ "#,
+ )
+ .unwrap();
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let lock3 = p.read_lockfile();
assert_ne!(lock1, lock3);
assert_ne!(lock2, lock3);
// remove the dep
println!("lock4");
- File::create(&toml).unwrap().write_all(br#"
+ File::create(&toml)
+ .unwrap()
+ .write_all(
+ br#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#).unwrap();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ "#,
+ )
+ .unwrap();
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let lock4 = p.read_lockfile();
assert_eq!(lock1, lock4);
}
Package::new("serde", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[dependencies]
serde = "1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_stderr("\
+ assert_that(
+ p.cargo("generate-lockfile"),
+ execs().with_stderr(
+ "\
[UPDATING] registry `[..]`
-"));
+",
+ ),
+ );
- assert_that(p.cargo("generate-lockfile").masquerade_as_nightly_cargo().arg("-Zno-index-update"),
- execs().with_status(0).with_stdout("").with_stderr(""));
+ assert_that(
+ p.cargo("generate-lockfile")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zno-index-update"),
+ execs().with_status(0).with_stdout("").with_stderr(""),
+ );
}
#[test]
fn preserve_metadata() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let metadata = r#"
[metadata]
let lockfile = p.root().join("Cargo.lock");
let lock = p.read_lockfile();
let data = lock + metadata;
- File::create(&lockfile).unwrap().write_all(data.as_bytes()).unwrap();
+ File::create(&lockfile)
+ .unwrap()
+ .write_all(data.as_bytes())
+ .unwrap();
// Build and make sure the metadata is still there
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
let lock = p.read_lockfile();
assert!(lock.contains(metadata.trim()), "{}", lock);
// Update and make sure the metadata is still there
- assert_that(p.cargo("update"),
- execs().with_status(0));
+ assert_that(p.cargo("update"), execs().with_status(0));
let lock = p.read_lockfile();
assert!(lock.contains(metadata.trim()), "{}", lock);
}
#[test]
fn preserve_line_endings_issue_2076() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
let lockfile = p.root().join("Cargo.lock");
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
- assert_that(&lockfile,
- existing_file());
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
+ assert_that(&lockfile, existing_file());
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let lock0 = p.read_lockfile();
let lock1 = lock0.replace("\n", "\r\n");
{
- File::create(&lockfile).unwrap().write_all(lock1.as_bytes()).unwrap();
+ File::create(&lockfile)
+ .unwrap()
+ .write_all(lock1.as_bytes())
+ .unwrap();
}
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
let lock2 = p.read_lockfile();
#[test]
fn cargo_update_generate_lockfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
assert_that(&lockfile, is_not(existing_file()));
assert_that(p.cargo("update"), execs().with_status(0).with_stdout(""));
assert_that(&lockfile, existing_file());
-
}
use cargo::util::process;
use cargotest::sleep_ms;
use cargotest::support::paths::{self, CargoPathExt};
-use cargotest::support::{git, project, execs, main_file, path2url};
+use cargotest::support::{execs, git, main_file, project, path2url};
use cargotest::ChannelChanger;
-use hamcrest::{assert_that,existing_file};
+use hamcrest::{assert_that, existing_file};
#[test]
fn cargo_compile_simple_git_dep() {
let project = project("foo");
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
pub fn hello() -> &'static str {
"hello world"
}
- "#)
+ "#,
+ )
}).unwrap();
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
[dependencies.dep1]
git = '{}'
- "#, git_project.url()))
- .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
let root = project.root();
let git_root = git_project.root();
- assert_that(project.cargo("build"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [COMPILING] dep1 v0.5.0 ({}#[..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
- path2url(git_root.clone()),
- path2url(git_root),
- path2url(root))));
+ assert_that(
+ project.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}#[..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(git_root.clone()),
+ path2url(git_root),
+ path2url(root)
+ )),
+ );
assert_that(&project.bin("foo"), existing_file());
assert_that(
- process(&project.bin("foo")),
- execs().with_stdout("hello world\n"));
+ process(&project.bin("foo")),
+ execs().with_stdout("hello world\n"),
+ );
}
#[test]
fn cargo_compile_forbird_git_httpsrepo_offline() {
-
let p = project("need_remote_repo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "need_remote_repo"
[dependencies.dep1]
git = 'https://github.com/some_user/dep1.git'
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
-
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(101).
with_stderr("\
can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (-Z offline)"));
}
-
#[test]
fn cargo_compile_offline_with_cached_git_dep() {
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
version = "0.5.0"
authors = ["chabapok@example.com"]
[lib]
- name = "dep1""#)
- .file("src/lib.rs", r#"
+ name = "dep1""#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub static COOL_STR:&str = "cached git repo rev1";
- "#)
+ "#,
+ )
}).unwrap();
let repo = git2::Repository::open(&git_project.root()).unwrap();
let rev1 = repo.revparse_single("HEAD").unwrap().id();
// Commit the changes and make sure we trigger a recompile
- File::create(&git_project.root().join("src/lib.rs")).unwrap().write_all(br#"
+ File::create(&git_project.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub static COOL_STR:&str = "cached git repo rev2";
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
git::add(&repo);
let rev2 = git::commit(&repo);
{
// cache to regisrty rev1 and rev2
let prj = project("cache_git_dep")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "cache_git_dep"
version = "0.5.0"
[dependencies.dep1]
git = '{}'
rev = "{}"
- "#, git_project.url(), rev1.clone()))
+ "#,
+ git_project.url(),
+ rev1.clone()
+ ),
+ )
.file("src/main.rs", "fn main(){}")
.build();
assert_that(prj.cargo("build"), execs().with_status(0));
- File::create(&prj.root().join("Cargo.toml")).unwrap().write_all(
- &format!(r#"
+ File::create(&prj.root().join("Cargo.toml"))
+ .unwrap()
+ .write_all(&format!(
+ r#"
[project]
name = "cache_git_dep"
version = "0.5.0"
[dependencies.dep1]
git = '{}'
rev = "{}"
- "#, git_project.url(), rev2.clone()).as_bytes()
- ).unwrap();
+ "#,
+ git_project.url(),
+ rev2.clone()
+ ).as_bytes())
+ .unwrap();
assert_that(prj.cargo("build"), execs().with_status(0));
}
let project = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.dep1]
git = '{}'
- "#, git_project.url()))
- .file("src/main.rs", &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]),
+ )
.build();
let root = project.root();
let git_root = git_project.root();
- assert_that(project.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_stderr(format!("\
+ assert_that(
+ project
+ .cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs().with_stderr(format!(
+ "\
[COMPILING] dep1 v0.5.0 ({}#[..])
[COMPILING] foo v0.5.0 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
- path2url(git_root),
- path2url(root)
- )));
+ path2url(git_root),
+ path2url(root)
+ )),
+ );
assert_that(&project.bin("foo"), existing_file());
- assert_that(process(&project.bin("foo")),
- execs().with_stdout("hello from cached git repo rev2\n"));
-
- drop( File::create(&project.root().join("Cargo.toml")).unwrap()
- .write_all(&format!(r#"
+ assert_that(
+ process(&project.bin("foo")),
+ execs().with_stdout("hello from cached git repo rev2\n"),
+ );
+
+ drop(
+ File::create(&project.root().join("Cargo.toml"))
+ .unwrap()
+ .write_all(&format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies.dep1]
git = '{}'
rev = "{}"
- "#, git_project.url(), rev1).as_bytes()).unwrap() );
-
- let _out = project.cargo("build").masquerade_as_nightly_cargo()
- .arg("-Zoffline").exec_with_output();
- assert_that(process(&project.bin("foo")),
- execs().with_stdout("hello from cached git repo rev1\n"));
+ "#,
+ git_project.url(),
+ rev1
+ ).as_bytes())
+ .unwrap(),
+ );
+
+ let _out = project
+ .cargo("build")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline")
+ .exec_with_output();
+ assert_that(
+ process(&project.bin("foo")),
+ execs().with_stdout("hello from cached git repo rev1\n"),
+ );
}
-
#[test]
fn cargo_compile_git_dep_branch() {
let project = project("foo");
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
pub fn hello() -> &'static str {
"hello world"
}
- "#)
+ "#,
+ )
}).unwrap();
// Make a new branch based on the current HEAD commit
repo.branch("branchy", &head, true).unwrap();
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
git = '{}'
branch = "branchy"
- "#, git_project.url()))
- .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
let root = project.root();
let git_root = git_project.root();
- assert_that(project.cargo("build"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
- path2url(git_root.clone()),
- path2url(git_root),
- path2url(root))));
+ assert_that(
+ project.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(git_root.clone()),
+ path2url(git_root),
+ path2url(root)
+ )),
+ );
assert_that(&project.bin("foo"), existing_file());
assert_that(
- process(&project.bin("foo")),
- execs().with_stdout("hello world\n"));
+ process(&project.bin("foo")),
+ execs().with_stdout("hello world\n"),
+ );
}
#[test]
let project = project("foo");
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
pub fn hello() -> &'static str {
"hello world"
}
- "#)
+ "#,
+ )
}).unwrap();
// Make a tag corresponding to the current HEAD
let repo = git2::Repository::open(&git_project.root()).unwrap();
let head = repo.head().unwrap().target().unwrap();
- repo.tag("v0.1.0",
- &repo.find_object(head, None).unwrap(),
- &repo.signature().unwrap(),
- "make a new tag",
- false).unwrap();
+ repo.tag(
+ "v0.1.0",
+ &repo.find_object(head, None).unwrap(),
+ &repo.signature().unwrap(),
+ "make a new tag",
+ false,
+ ).unwrap();
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
git = '{}'
tag = "v0.1.0"
- "#, git_project.url()))
- .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
let root = project.root();
let git_root = git_project.root();
- assert_that(project.cargo("build"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
- path2url(git_root.clone()),
- path2url(git_root),
- path2url(root))));
+ assert_that(
+ project.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ path2url(git_root.clone()),
+ path2url(git_root),
+ path2url(root)
+ )),
+ );
assert_that(&project.bin("foo"), existing_file());
- assert_that(process(&project.bin("foo")),
- execs().with_stdout("hello world\n"));
+ assert_that(
+ process(&project.bin("foo")),
+ execs().with_stdout("hello world\n"),
+ );
- assert_that(project.cargo("build"),
- execs().with_status(0));
+ assert_that(project.cargo("build"), execs().with_status(0));
}
#[test]
fn cargo_compile_with_nested_paths() {
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
extern crate dep2;
pub fn hello() -> &'static str {
dep2::hello()
}
- "#)
- .file("vendor/dep2/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "vendor/dep2/Cargo.toml",
+ r#"
[project]
name = "dep2"
[lib]
name = "dep2"
- "#)
- .file("vendor/dep2/src/dep2.rs", r#"
+ "#,
+ )
+ .file(
+ "vendor/dep2/src/dep2.rs",
+ r#"
pub fn hello() -> &'static str {
"hello world"
}
- "#)
+ "#,
+ )
}).unwrap();
let p = project("parent")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "parent"
[[bin]]
name = "parent"
- "#, git_project.url()))
- .file("src/parent.rs",
- &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/parent.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
- p.cargo("build")
- .exec_with_output()
- .unwrap();
+ p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("parent"), existing_file());
- assert_that(process(&p.bin("parent")),
- execs().with_stdout("hello world\n"));
+ assert_that(
+ process(&p.bin("parent")),
+ execs().with_stdout("hello world\n"),
+ );
}
#[test]
fn cargo_compile_with_malformed_nested_paths() {
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
pub fn hello() -> &'static str {
"hello world"
}
- "#)
- .file("vendor/dep2/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "vendor/dep2/Cargo.toml",
+ r#"
!INVALID!
- "#)
+ "#,
+ )
}).unwrap();
let p = project("parent")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "parent"
[[bin]]
name = "parent"
- "#, git_project.url()))
- .file("src/parent.rs",
- &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/parent.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
- p.cargo("build")
- .exec_with_output()
- .unwrap();
+ p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("parent"), existing_file());
- assert_that(process(&p.bin("parent")),
- execs().with_stdout("hello world\n"));
+ assert_that(
+ process(&p.bin("parent")),
+ execs().with_stdout("hello world\n"),
+ );
}
#[test]
fn cargo_compile_with_meta_package() {
let git_project = git::new("meta-dep", |project| {
project
- .file("dep1/Cargo.toml", r#"
+ .file(
+ "dep1/Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("dep1/src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "dep1/src/dep1.rs",
+ r#"
pub fn hello() -> &'static str {
"this is dep1"
}
- "#)
- .file("dep2/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "dep2/Cargo.toml",
+ r#"
[project]
name = "dep2"
[lib]
name = "dep2"
- "#)
- .file("dep2/src/dep2.rs", r#"
+ "#,
+ )
+ .file(
+ "dep2/src/dep2.rs",
+ r#"
pub fn hello() -> &'static str {
"this is dep2"
}
- "#)
+ "#,
+ )
}).unwrap();
let p = project("parent")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "parent"
[[bin]]
name = "parent"
- "#, git_project.url(), git_project.url()))
- .file("src/parent.rs",
- &main_file(r#""{} {}", dep1::hello(), dep2::hello()"#, &["dep1", "dep2"]))
+ "#,
+ git_project.url(),
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/parent.rs",
+ &main_file(
+ r#""{} {}", dep1::hello(), dep2::hello()"#,
+ &["dep1", "dep2"],
+ ),
+ )
.build();
- p.cargo("build")
- .exec_with_output()
- .unwrap();
+ p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("parent"), existing_file());
- assert_that(process(&p.bin("parent")),
- execs().with_stdout("this is dep1 this is dep2\n"));
+ assert_that(
+ process(&p.bin("parent")),
+ execs().with_stdout("this is dep1 this is dep2\n"),
+ );
}
#[test]
let url = "git@github.com:a/dep";
let project = project("project")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#, url))
- .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ url
+ ),
+ )
+ .file(
+ "src/foo.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
- assert_that(project.cargo("build"),
- execs()
- .with_stdout("")
- .with_stderr(&format!("\
+ assert_that(
+ project.cargo("build"),
+ execs().with_stdout("").with_stderr(&format!(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
invalid url `{}`: relative URL without a base
-", url)));
+",
+ url
+ )),
+ );
}
#[test]
fn two_revs_same_deps() {
let bar = git::new("meta-dep", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.0"
authors = []
- "#)
- .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
}).unwrap();
let repo = git2::Repository::open(&bar.root()).unwrap();
let rev1 = repo.revparse_single("HEAD").unwrap().id();
// Commit the changes and make sure we trigger a recompile
- File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#"
+ File::create(&bar.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn bar() -> i32 { 2 }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
git::add(&repo);
let rev2 = git::commit(&repo);
let foo = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.0.0"
[dependencies.baz]
path = "../baz"
- "#, bar.url(), rev1))
- .file("src/main.rs", r#"
+ "#,
+ bar.url(),
+ rev1
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
extern crate baz;
assert_eq!(bar::bar(), 1);
assert_eq!(baz::baz(), 2);
}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "baz"
version = "0.0.0"
[dependencies.bar]
git = '{}'
rev = "{}"
- "#, bar.url(), rev2))
- .file("src/lib.rs", r#"
+ "#,
+ bar.url(),
+ rev2
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
pub fn baz() -> i32 { bar::bar() }
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(foo.cargo("build").arg("-v"), execs().with_status(0));
assert_that(&foo.bin("foo"), existing_file());
assert_that(foo.process(&foo.bin("foo")), execs().with_status(0));
}
fn recompilation() {
let git_project = git::new("bar", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bar.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
git = '{}'
- "#, git_project.url()))
- .file("src/main.rs",
- &main_file(r#""{:?}", bar::bar()"#, &["bar"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"]))
.build();
// First time around we should compile both foo and bar
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [COMPILING] bar v0.5.0 ({}#[..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- git_project.url(),
- git_project.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] bar v0.5.0 ({}#[..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ git_project.url(),
+ git_project.url(),
+ p.url()
+ )),
+ );
// Don't recompile the second time
- assert_that(p.cargo("build"),
- execs().with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_stdout(""));
// Modify a file manually, shouldn't trigger a recompile
- File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#"
+ File::create(&git_project.root().join("src/bar.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn bar() { println!("hello!"); }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
- assert_that(p.cargo("build"),
- execs().with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_stdout(""));
- assert_that(p.cargo("update"),
- execs().with_stderr(&format!("[UPDATING] git repository `{}`",
- git_project.url())));
+ assert_that(
+ p.cargo("update"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`",
+ git_project.url()
+ )),
+ );
- assert_that(p.cargo("build"),
- execs().with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_stdout(""));
// Commit the changes and make sure we don't trigger a recompile because the
// lockfile says not to change
git::commit(&repo);
println!("compile after commit");
- assert_that(p.cargo("build"),
- execs().with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_stdout(""));
p.root().move_into_the_past();
// Update the dependency and carry on!
- assert_that(p.cargo("update"),
- execs().with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
- ",
- git_project.url())));
+ assert_that(
+ p.cargo("update"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git_project.url()
+ )),
+ );
println!("going for the last compile");
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}#[..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- git_project.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}#[..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ git_project.url(),
+ p.url()
+ )),
+ );
// Make sure clean only cleans one dep
- assert_that(p.cargo("clean")
- .arg("-p").arg("foo"),
- execs().with_stdout(""));
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url())));
+ assert_that(
+ p.cargo("clean").arg("-p").arg("foo"),
+ execs().with_stdout(""),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url()
+ )),
+ );
}
#[test]
fn update_with_shared_deps() {
let git_project = git::new("bar", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bar.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
path = "dep1"
[dependencies.dep2]
path = "dep2"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate dep1;
#[allow(unused_extern_crates)]
extern crate dep2;
fn main() {}
- "#)
- .file("dep1/Cargo.toml", &format!(r#"
+ "#,
+ )
+ .file(
+ "dep1/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "dep1"
version = "0.5.0"
[dependencies.bar]
version = "0.5.0"
git = '{}'
- "#, git_project.url()))
+ "#,
+ git_project.url()
+ ),
+ )
.file("dep1/src/lib.rs", "")
- .file("dep2/Cargo.toml", &format!(r#"
+ .file(
+ "dep2/Cargo.toml",
+ &format!(
+ r#"
[package]
name = "dep2"
version = "0.5.0"
[dependencies.bar]
version = "0.5.0"
git = '{}'
- "#, git_project.url()))
+ "#,
+ git_project.url()
+ ),
+ )
.file("dep2/src/lib.rs", "")
.build();
// First time around we should compile both foo and bar
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "\
[UPDATING] git repository `{git}`
[COMPILING] bar v0.5.0 ({git}#[..])
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] foo v0.5.0 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
-git = git_project.url(), dir = p.url())));
+ git = git_project.url(),
+ dir = p.url()
+ )),
+ );
// Modify a file manually, and commit it
- File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#"
+ File::create(&git_project.root().join("src/bar.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn bar() { println!("hello!"); }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let repo = git2::Repository::open(&git_project.root()).unwrap();
let old_head = repo.head().unwrap().target().unwrap();
git::add(&repo);
// By default, not transitive updates
println!("dep1 update");
- assert_that(p.cargo("update")
- .arg("-p").arg("dep1"),
- execs().with_stdout(""));
+ assert_that(
+ p.cargo("update").arg("-p").arg("dep1"),
+ execs().with_stdout(""),
+ );
// Don't do anything bad on a weird --precise argument
println!("bar bad precise update");
- assert_that(p.cargo("update")
- .arg("-p").arg("bar")
- .arg("--precise").arg("0.1.2"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("update")
+ .arg("-p")
+ .arg("bar")
+ .arg("--precise")
+ .arg("0.1.2"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] git repository [..]
[ERROR] Unable to update [..]
Caused by:
revspec '0.1.2' not found; [..]
-"));
+",
+ ),
+ );
// Specifying a precise rev to the old rev shouldn't actually update
// anything because we already have the rev in the db.
println!("bar precise update");
- assert_that(p.cargo("update")
- .arg("-p").arg("bar")
- .arg("--precise").arg(&old_head.to_string()),
- execs().with_stdout(""));
+ assert_that(
+ p.cargo("update")
+ .arg("-p")
+ .arg("bar")
+ .arg("--precise")
+ .arg(&old_head.to_string()),
+ execs().with_stdout(""),
+ );
// Updating aggressively should, however, update the repo.
println!("dep1 aggressive update");
- assert_that(p.cargo("update")
- .arg("-p").arg("dep1")
- .arg("--aggressive"),
- execs().with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
- ", git_project.url())));
+ assert_that(
+ p.cargo("update").arg("-p").arg("dep1").arg("--aggressive"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git_project.url()
+ )),
+ );
// Make sure we still only compile one version of the git repo
println!("build");
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "\
[COMPILING] bar v0.5.0 ({git}#[..])
[COMPILING] [..] v0.5.0 ({dir}[..]dep[..])
[COMPILING] [..] v0.5.0 ({dir}[..]dep[..])
[COMPILING] foo v0.5.0 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
- git = git_project.url(), dir = p.url())));
+ git = git_project.url(),
+ dir = p.url()
+ )),
+ );
// We should be able to update transitive deps
- assert_that(p.cargo("update").arg("-p").arg("bar"),
- execs().with_stderr(&format!("[UPDATING] git repository `{}`",
- git_project.url())));
+ assert_that(
+ p.cargo("update").arg("-p").arg("bar"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`",
+ git_project.url()
+ )),
+ );
}
#[test]
fn dep_with_submodule() {
let project = project("foo");
let git_project = git::new("dep1", |project| {
- project
- .file("Cargo.toml", r#"
+ project.file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep1"
version = "0.5.0"
authors = ["carlhuda@example.com"]
- "#)
- }).unwrap();
- let git_project2 = git::new("dep2", |project| {
- project.file("lib.rs", "pub fn dep() {}")
+ "#,
+ )
}).unwrap();
+ let git_project2 =
+ git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap();
let repo = git2::Repository::open(&git_project.root()).unwrap();
let url = path2url(git_project2.root()).to_string();
git::commit(&repo);
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
[dependencies.dep1]
git = '{}'
- "#, git_project.url()))
- .file("src/lib.rs", "
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate dep1;
pub fn foo() { dep1::dep() }
- ")
+ ",
+ )
.build();
- assert_that(project.cargo("build"),
- execs().with_stderr("\
+ assert_that(
+ project.cargo("build"),
+ execs()
+ .with_stderr(
+ "\
[UPDATING] git repository [..]
[COMPILING] dep1 [..]
[COMPILING] foo [..]
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n").with_status(0));
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ )
+ .with_status(0),
+ );
}
#[test]
fn dep_with_bad_submodule() {
let project = project("foo");
let git_project = git::new("dep1", |project| {
- project
- .file("Cargo.toml", r#"
+ project.file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep1"
version = "0.5.0"
authors = ["carlhuda@example.com"]
- "#)
- }).unwrap();
- let git_project2 = git::new("dep2", |project| {
- project.file("lib.rs", "pub fn dep() {}")
+ "#,
+ )
}).unwrap();
+ let git_project2 =
+ git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap();
let repo = git2::Repository::open(&git_project.root()).unwrap();
let url = path2url(git_project2.root()).to_string();
let repo = git2::Repository::open(&git_project2.root()).unwrap();
let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap();
let commit = repo.find_commit(original_submodule_ref).unwrap();
- commit.amend(
- Some("refs/heads/master"),
- None,
- None,
- None,
- Some("something something"),
- None).unwrap();
+ commit
+ .amend(
+ Some("refs/heads/master"),
+ None,
+ None,
+ None,
+ Some("something something"),
+ None,
+ )
+ .unwrap();
let p = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
[dependencies.dep1]
git = '{}'
- "#, git_project.url()))
- .file("src/lib.rs", "
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate dep1;
pub fn foo() { dep1::dep() }
- ")
+ ",
+ )
.build();
- let expected = format!("\
+ let expected = format!(
+ "\
[UPDATING] git repository [..]
[ERROR] failed to load source for a dependency on `dep1`
Caused by:
object not found - no match for id [..]
-", path2url(git_project.root()));
+",
+ path2url(git_project.root())
+ );
- assert_that(p.cargo("build"),
- execs().with_stderr(expected).with_status(101));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(expected).with_status(101),
+ );
}
#[test]
let project = project("foo");
let git1 = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep1"
version = "0.5.0"
authors = ["carlhuda@example.com"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
}).unwrap();
let git2 = git::new("dep2", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep2"
version = "0.5.0"
authors = ["carlhuda@example.com"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
}).unwrap();
let p = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
git = '{}'
[dependencies.dep2]
git = '{}'
- "#, git1.url(), git2.url()))
+ "#,
+ git1.url(),
+ git2.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `[..]`\n\
- [UPDATING] git repository `[..]`\n\
- [COMPILING] [..] v0.5.0 ([..])\n\
- [COMPILING] [..] v0.5.0 ([..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
- p.url())));
-
- File::create(&git1.root().join("src/lib.rs")).unwrap().write_all(br#"
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `[..]`\n\
+ [UPDATING] git repository `[..]`\n\
+ [COMPILING] [..] v0.5.0 ([..])\n\
+ [COMPILING] [..] v0.5.0 ([..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ p.url()
+ )),
+ );
+
+ File::create(&git1.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn foo() {}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let repo = git2::Repository::open(&git1.root()).unwrap();
git::add(&repo);
git::commit(&repo);
- assert_that(p.cargo("update")
- .arg("-p").arg("dep1"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
- ", git1.url())));
+ assert_that(
+ p.cargo("update").arg("-p").arg("dep1"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git1.url()
+ )),
+ );
}
#[test]
fn stale_cached_version() {
let bar = git::new("meta-dep", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.0"
authors = []
- "#)
- .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
}).unwrap();
// Update the git database in the cache with the current state of the git
// repo
let foo = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.0.0"
[dependencies.bar]
git = '{}'
- "#, bar.url()))
- .file("src/main.rs", r#"
+ "#,
+ bar.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { assert_eq!(bar::bar(), 1) }
- "#)
+ "#,
+ )
.build();
assert_that(foo.cargo("build"), execs().with_status(0));
// Update the repo, and simulate someone else updating the lockfile and then
// us pulling it down.
- File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#"
+ File::create(&bar.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn bar() -> i32 { 1 + 0 }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let repo = git2::Repository::open(&bar.root()).unwrap();
git::add(&repo);
git::commit(&repo);
let rev = repo.revparse_single("HEAD").unwrap().id();
- File::create(&foo.root().join("Cargo.lock")).unwrap().write_all(format!(r#"
+ File::create(&foo.root().join("Cargo.lock"))
+ .unwrap()
+ .write_all(
+ format!(
+ r#"
[[package]]
name = "foo"
version = "0.0.0"
name = "bar"
version = "0.0.0"
source = 'git+{url}#{hash}'
- "#, url = bar.url(), hash = rev).as_bytes()).unwrap();
+ "#,
+ url = bar.url(),
+ hash = rev
+ ).as_bytes(),
+ )
+ .unwrap();
// Now build!
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] git repository `{bar}`
[COMPILING] bar v0.0.0 ({bar}#[..])
[COMPILING] foo v0.0.0 ({foo})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", bar = bar.url(), foo = foo.url())));
+",
+ bar = bar.url(),
+ foo = foo.url()
+ )),
+ );
assert_that(foo.process(&foo.bin("foo")), execs().with_status(0));
}
fn dep_with_changed_submodule() {
let project = project("foo");
let git_project = git::new("dep1", |project| {
- project
- .file("Cargo.toml", r#"
+ project.file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep1"
version = "0.5.0"
authors = ["carlhuda@example.com"]
- "#)
+ "#,
+ )
}).unwrap();
let git_project2 = git::new("dep2", |project| {
- project
- .file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }")
+ project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }")
}).unwrap();
let git_project3 = git::new("dep3", |project| {
- project
- .file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }")
+ project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }")
}).unwrap();
let repo = git2::Repository::open(&git_project.root()).unwrap();
- let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(),
- Path::new("src"));
+ let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src"));
git::commit(&repo);
let p = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.dep1]
git = '{}'
- "#, git_project.url()))
- .file("src/main.rs", "
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate dep1;
pub fn main() { println!(\"{}\", dep1::dep()) }
- ")
+ ",
+ )
.build();
println!("first run");
- assert_that(p.cargo("run"), execs()
- .with_stderr("[UPDATING] git repository `[..]`\n\
- [COMPILING] dep1 v0.5.0 ([..])\n\
- [COMPILING] foo v0.5.0 ([..])\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in \
- [..]\n\
- [RUNNING] `target[/]debug[/]foo[EXE]`\n")
- .with_stdout("project2\n")
- .with_status(0));
-
- File::create(&git_project.root().join(".gitmodules")).unwrap()
- .write_all(format!("[submodule \"src\"]\n\tpath = src\n\turl={}",
- git_project3.url()).as_bytes()).unwrap();
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_stderr(
+ "[UPDATING] git repository `[..]`\n\
+ [COMPILING] dep1 v0.5.0 ([..])\n\
+ [COMPILING] foo v0.5.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n\
+ [RUNNING] `target[/]debug[/]foo[EXE]`\n",
+ )
+ .with_stdout("project2\n")
+ .with_status(0),
+ );
+
+ File::create(&git_project.root().join(".gitmodules"))
+ .unwrap()
+ .write_all(
+ format!(
+ "[submodule \"src\"]\n\tpath = src\n\turl={}",
+ git_project3.url()
+ ).as_bytes(),
+ )
+ .unwrap();
// Sync the submodule and reset it to the new remote.
sub.sync().unwrap();
{
let subrepo = sub.open().unwrap();
- subrepo.remote_add_fetch("origin",
- "refs/heads/*:refs/heads/*").unwrap();
- subrepo.remote_set_url("origin",
- &git_project3.url().to_string()).unwrap();
+ subrepo
+ .remote_add_fetch("origin", "refs/heads/*:refs/heads/*")
+ .unwrap();
+ subrepo
+ .remote_set_url("origin", &git_project3.url().to_string())
+ .unwrap();
let mut origin = subrepo.find_remote("origin").unwrap();
origin.fetch(&[], None, None).unwrap();
let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap();
sleep_ms(1000);
// Update the dependency and carry on!
println!("update");
- assert_that(p.cargo("update").arg("-v"),
- execs()
- .with_stderr("")
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
- ", git_project.url())));
+ assert_that(
+ p.cargo("update").arg("-v"),
+ execs().with_stderr("").with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
+ ",
+ git_project.url()
+ )),
+ );
println!("last run");
- assert_that(p.cargo("run"), execs()
- .with_stderr("[COMPILING] dep1 v0.5.0 ([..])\n\
- [COMPILING] foo v0.5.0 ([..])\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in \
- [..]\n\
- [RUNNING] `target[/]debug[/]foo[EXE]`\n")
- .with_stdout("project3\n")
- .with_status(0));
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_stderr(
+ "[COMPILING] dep1 v0.5.0 ([..])\n\
+ [COMPILING] foo v0.5.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n\
+ [RUNNING] `target[/]debug[/]foo[EXE]`\n",
+ )
+ .with_stdout("project3\n")
+ .with_status(0),
+ );
}
#[test]
fn dev_deps_with_testing() {
let p2 = git::new("bar", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn gimme() -> &'static str { "zoidberg" }
- "#)
+ "#,
+ )
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
[dev-dependencies.bar]
version = "0.5.0"
git = '{}'
- "#, p2.url()))
- .file("src/main.rs", r#"
+ "#,
+ p2.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
#[cfg(test)]
extern crate bar;
#[test] fn foo() { bar::gimme(); }
}
- "#)
+ "#,
+ )
.build();
// Generate a lockfile which did not use `bar` to compile, but had to update
// `bar` to generate the lockfile
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "\
[UPDATING] git repository `{bar}`
[COMPILING] foo v0.5.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = p.url(), bar = p2.url())));
+",
+ url = p.url(),
+ bar = p2.url()
+ )),
+ );
// Make sure we use the previous resolution of `bar` instead of updating it
// a second time.
- assert_that(p.cargo("test"),
- execs().with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_stderr(
+ "\
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] [..] v0.5.0 ([..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("test tests::foo ... ok"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("test tests::foo ... ok"),
+ );
}
#[test]
fn git_build_cmd_freshness() {
let foo = git::new("foo", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", "fn main() {}")
- .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
- .file(".gitignore", "
+ "#,
+ )
+ .file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ .file(
+ ".gitignore",
+ "
src/bar.rs
- ")
+ ",
+ )
}).unwrap();
foo.root().move_into_the_past();
sleep_ms(1000);
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url())));
+",
+ url = foo.url()
+ )),
+ );
// Smoke test to make sure it doesn't compile again
println!("first pass");
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stdout(""));
+ assert_that(foo.cargo("build"), execs().with_status(0).with_stdout(""));
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
File::create(&foo.root().join("src/bar.rs")).unwrap();
- assert_that(foo.cargo("build"),
- execs().with_status(0)
- .with_stdout(""));
+ assert_that(foo.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn git_name_not_always_needed() {
let p2 = git::new("bar", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn gimme() -> &'static str { "zoidberg" }
- "#)
+ "#,
+ )
}).unwrap();
let repo = git2::Repository::open(&p2.root()).unwrap();
let _ = cfg.remove("user.email");
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dev-dependencies.bar]
git = '{}'
- "#, p2.url()))
+ "#,
+ p2.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
// Generate a lockfile which did not use `bar` to compile, but had to update
// `bar` to generate the lockfile
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "\
[UPDATING] git repository `{bar}`
[COMPILING] foo v0.5.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = p.url(), bar = p2.url())));
+",
+ url = p.url(),
+ bar = p2.url()
+ )),
+ );
}
#[test]
fn git_repo_changing_no_rebuild() {
let bar = git::new("bar", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
}).unwrap();
// Lock p1 to the first rev in the git repo
let p1 = project("p1")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "p1"
version = "0.5.0"
build = 'build.rs'
[dependencies.bar]
git = '{}'
- "#, bar.url()))
+ "#,
+ bar.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.file("build.rs", "fn main() {}")
.build();
p1.root().move_into_the_past();
- assert_that(p1.cargo("build"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p1.cargo("build"),
+ execs().with_stderr(&format!(
+ "\
[UPDATING] git repository `{bar}`
[COMPILING] [..]
[COMPILING] [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", bar = bar.url())));
+",
+ bar = bar.url()
+ )),
+ );
// Make a commit to lock p2 to a different rev
- File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#"
+ File::create(&bar.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn bar() -> i32 { 2 }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let repo = git2::Repository::open(&bar.root()).unwrap();
git::add(&repo);
git::commit(&repo);
// Lock p2 to the second rev
let p2 = project("p2")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "p2"
version = "0.5.0"
authors = []
[dependencies.bar]
git = '{}'
- "#, bar.url()))
+ "#,
+ bar.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p2.cargo("build"),
- execs().with_stderr(&format!("\
+ assert_that(
+ p2.cargo("build"),
+ execs().with_stderr(&format!(
+ "\
[UPDATING] git repository `{bar}`
[COMPILING] [..]
[COMPILING] [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", bar = bar.url())));
+",
+ bar = bar.url()
+ )),
+ );
// And now for the real test! Make sure that p1 doesn't get rebuilt
// even though the git repo has changed.
- assert_that(p1.cargo("build"),
- execs().with_stdout(""));
+ assert_that(p1.cargo("build"), execs().with_stdout(""));
}
#[test]
fn git_dep_build_cmd() {
let p = git::new("foo", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/foo.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
path = "src/bar.rs"
- "#)
- .file("bar/src/bar.rs.in", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs.in",
+ r#"
pub fn gimme() -> i32 { 0 }
- "#)
- .file("bar/build.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
use std::fs;
fn main() {
fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
}
- "#)
+ "#,
+ )
}).unwrap();
p.root().join("bar").move_into_the_past();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("0\n"));
+ assert_that(process(&p.bin("foo")), execs().with_stdout("0\n"));
// Touching bar.rs.in should cause the `build` command to run again.
- fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap()
- .write_all(b"pub fn gimme() -> i32 { 1 }").unwrap();
+ fs::File::create(&p.root().join("bar/src/bar.rs.in"))
+ .unwrap()
+ .write_all(b"pub fn gimme() -> i32 { 1 }")
+ .unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("1\n"));
+ assert_that(process(&p.bin("foo")), execs().with_stdout("1\n"));
}
#[test]
fn fetch_downloads() {
let bar = git::new("bar", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
}).unwrap();
let p = project("p1")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "p1"
version = "0.5.0"
authors = []
[dependencies.bar]
git = '{}'
- "#, bar.url()))
+ "#,
+ bar.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("fetch"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("fetch"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] git repository `{url}`
-", url = bar.url())));
+",
+ url = bar.url()
+ )),
+ );
- assert_that(p.cargo("fetch"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("fetch"), execs().with_status(0).with_stdout(""));
}
#[test]
fn warnings_in_git_dep() {
let bar = git::new("bar", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "fn unused() {}")
+ "#,
+ )
+ .file("src/lib.rs", "fn unused() {}")
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
[dependencies.bar]
git = '{}'
- "#, bar.url()))
+ "#,
+ bar.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- [COMPILING] bar v0.5.0 ({}#[..])\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
- bar.url(),
- bar.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ [COMPILING] bar v0.5.0 ({}#[..])\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+ bar.url(),
+ bar.url(),
+ p.url()
+ )),
+ );
}
#[test]
fn update_ambiguous() {
let foo1 = git::new("foo1", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let foo2 = git::new("foo2", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.6.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let bar = git::new("bar", |project| {
- project.file("Cargo.toml", &format!(r#"
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "bar"
version = "0.5.0"
[dependencies.foo]
git = '{}'
- "#, foo2.url()))
- .file("src/lib.rs", "")
+ "#,
+ foo2.url()
+ ),
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let p = project("project")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "project"
version = "0.5.0"
git = '{}'
[dependencies.bar]
git = '{}'
- "#, foo1.url(), bar.url()))
+ "#,
+ foo1.url(),
+ bar.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
- assert_that(p.cargo("update")
- .arg("-p").arg("foo"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("update").arg("-p").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] There are multiple `foo` packages in your project, and the specification `foo` \
is ambiguous.
Please re-run this command with `-p <spec>` where `<spec>` is one of the \
following:
foo:0.[..].0
foo:0.[..].0
-"));
+",
+ ),
+ );
}
#[test]
fn update_one_dep_in_repo_with_many_deps() {
let foo = git::new("foo", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("a/src/lib.rs", "")
+ "#,
+ )
+ .file("a/src/lib.rs", "")
}).unwrap();
let p = project("project")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "project"
version = "0.5.0"
git = '{}'
[dependencies.a]
git = '{}'
- "#, foo.url(), foo.url()))
+ "#,
+ foo.url(),
+ foo.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
- assert_that(p.cargo("update")
- .arg("-p").arg("foo"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("update").arg("-p").arg("foo"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] git repository `{}`
-", foo.url())));
+",
+ foo.url()
+ )),
+ );
}
#[test]
fn switch_deps_does_not_update_transitive() {
let transitive = git::new("transitive", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "transitive"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let dep1 = git::new("dep1", |project| {
- project.file("Cargo.toml", &format!(r#"
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "dep"
version = "0.5.0"
[dependencies.transitive]
git = '{}'
- "#, transitive.url()))
- .file("src/lib.rs", "")
+ "#,
+ transitive.url()
+ ),
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let dep2 = git::new("dep2", |project| {
- project.file("Cargo.toml", &format!(r#"
+ project
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "dep"
version = "0.5.0"
[dependencies.transitive]
git = '{}'
- "#, transitive.url()))
- .file("src/lib.rs", "")
+ "#,
+ transitive.url()
+ ),
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let p = project("project")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "project"
version = "0.5.0"
authors = []
[dependencies.dep]
git = '{}'
- "#, dep1.url()))
+ "#,
+ dep1.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] git repository `{}`
[UPDATING] git repository `{}`
[COMPILING] transitive [..]
[COMPILING] dep [..]
[COMPILING] project [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dep1.url(), transitive.url())));
+",
+ dep1.url(),
+ transitive.url()
+ )),
+ );
// Update the dependency to point to the second repository, but this
// shouldn't update the transitive dependency which is the same.
- File::create(&p.root().join("Cargo.toml")).unwrap().write_all(format!(r#"
+ File::create(&p.root().join("Cargo.toml"))
+ .unwrap()
+ .write_all(
+ format!(
+ r#"
[project]
name = "project"
version = "0.5.0"
authors = []
[dependencies.dep]
git = '{}'
- "#, dep2.url()).as_bytes()).unwrap();
+ "#,
+ dep2.url()
+ ).as_bytes(),
+ )
+ .unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] git repository `{}`
[COMPILING] dep [..]
[COMPILING] project [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dep2.url())));
+",
+ dep2.url()
+ )),
+ );
}
#[test]
fn update_one_source_updates_all_packages_in_that_git_source() {
let dep = git::new("dep", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep"
version = "0.5.0"
[dependencies.a]
path = "a"
- "#)
- .file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.5.0"
authors = []
- "#)
- .file("a/src/lib.rs", "")
+ "#,
+ )
+ .file("a/src/lib.rs", "")
}).unwrap();
let p = project("project")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "project"
version = "0.5.0"
authors = []
[dependencies.dep]
git = '{}'
- "#, dep.url()))
+ "#,
+ dep.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
let repo = git2::Repository::open(&dep.root()).unwrap();
let rev1 = repo.revparse_single("HEAD").unwrap().id();
// Just be sure to change a file
- File::create(&dep.root().join("src/lib.rs")).unwrap().write_all(br#"
+ File::create(&dep.root().join("src/lib.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn bar() -> i32 { 2 }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
git::add(&repo);
git::commit(&repo);
- assert_that(p.cargo("update").arg("-p").arg("dep"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("update").arg("-p").arg("dep"),
+ execs().with_status(0),
+ );
let mut lockfile = String::new();
- File::open(&p.root().join("Cargo.lock")).unwrap()
- .read_to_string(&mut lockfile).unwrap();
- assert!(!lockfile.contains(&rev1.to_string()),
- "{} in {}", rev1, lockfile);
+ File::open(&p.root().join("Cargo.lock"))
+ .unwrap()
+ .read_to_string(&mut lockfile)
+ .unwrap();
+ assert!(
+ !lockfile.contains(&rev1.to_string()),
+ "{} in {}",
+ rev1,
+ lockfile
+ );
}
#[test]
fn switch_sources() {
let a1 = git::new("a1", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let a2 = git::new("a2", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.5.1"
authors = []
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
}).unwrap();
let p = project("project")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "project"
version = "0.5.0"
authors = []
[dependencies.b]
path = "b"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("b/Cargo.toml", &format!(r#"
+ .file(
+ "b/Cargo.toml",
+ &format!(
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
[dependencies.a]
git = '{}'
- "#, a1.url()))
+ "#,
+ a1.url()
+ ),
+ )
.file("b/src/lib.rs", "pub fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `file://[..]a1`
[COMPILING] a v0.5.0 ([..]a1#[..]
[COMPILING] b v0.5.0 ([..])
[COMPILING] project v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- File::create(&p.root().join("b/Cargo.toml")).unwrap().write_all(format!(r#"
+",
+ ),
+ );
+
+ File::create(&p.root().join("b/Cargo.toml"))
+ .unwrap()
+ .write_all(
+ format!(
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
[dependencies.a]
git = '{}'
- "#, a2.url()).as_bytes()).unwrap();
+ "#,
+ a2.url()
+ ).as_bytes(),
+ )
+ .unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `file://[..]a2`
[COMPILING] a v0.5.1 ([..]a2#[..]
[COMPILING] b v0.5.0 ([..])
[COMPILING] project v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn dont_require_submodules_are_checked_out() {
let p = project("foo").build();
let git1 = git::new("dep1", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", "fn main() {}")
- .file("src/lib.rs", "")
- .file("a/foo", "")
+ "#,
+ ).file("build.rs", "fn main() {}")
+ .file("src/lib.rs", "")
+ .file("a/foo", "")
}).unwrap();
let git2 = git::new("dep2", |p| p).unwrap();
let dst = paths::home().join("foo");
git2::Repository::clone(&url, &dst).unwrap();
- assert_that(git1.cargo("build").arg("-v").cwd(&dst),
- execs().with_status(0));
+ assert_that(
+ git1.cargo("build").arg("-v").cwd(&dst),
+ execs().with_status(0),
+ );
}
#[test]
fn doctest_same_name() {
let a2 = git::new("a2", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "pub fn a2() {}")
+ "#,
+ ).file("src/lib.rs", "pub fn a2() {}")
}).unwrap();
let a1 = git::new("a1", |p| {
- p.file("Cargo.toml", &format!(r#"
+ p.file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
[dependencies]
a = {{ git = '{}' }}
- "#, a2.url()))
- .file("src/lib.rs", "extern crate a; pub fn a1() {}")
+ "#,
+ a2.url()
+ ),
+ ).file("src/lib.rs", "extern crate a; pub fn a1() {}")
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = {{ git = '{}' }}
- "#, a1.url()))
- .file("src/lib.rs", r#"
+ "#,
+ a1.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[macro_use]
extern crate a;
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
#[test]
fn lints_are_suppressed() {
let a = git::new("a", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ ).file(
+ "src/lib.rs",
+ "
use std::option;
- ")
+ ",
+ )
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = {{ git = '{}' }}
- "#, a.url()))
+ "#,
+ a.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `[..]`
[COMPILING] a v0.5.0 ([..])
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn denied_lints_are_allowed() {
let a = git::new("a", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ ).file(
+ "src/lib.rs",
+ "
#![deny(warnings)]
use std::option;
- ")
+ ",
+ )
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = {{ git = '{}' }}
- "#, a.url()))
+ "#,
+ a.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `[..]`
[COMPILING] a v0.5.0 ([..])
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn add_a_git_dep() {
let git = git::new("git", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "git"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "")
+ "#,
+ ).file("src/lib.rs", "")
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = {{ path = 'a' }}
git = {{ git = '{}' }}
- "#, git.url()))
+ "#,
+ git.url()
+ ),
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- File::create(p.root().join("a/Cargo.toml")).unwrap().write_all(format!(r#"
+ File::create(p.root().join("a/Cargo.toml"))
+ .unwrap()
+ .write_all(
+ format!(
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies]
git = {{ git = '{}' }}
- "#, git.url()).as_bytes()).unwrap();
+ "#,
+ git.url()
+ ).as_bytes(),
+ )
+ .unwrap();
assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn two_at_rev_instead_of_tag() {
let git = git::new("git", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "git1"
version = "0.5.0"
authors = []
- "#)
- .file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ "#,
+ ).file("src/lib.rs", "")
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "git2"
version = "0.5.0"
authors = []
- "#)
- .file("a/src/lib.rs", "")
+ "#,
+ )
+ .file("a/src/lib.rs", "")
}).unwrap();
// Make a tag corresponding to the current HEAD
let repo = git2::Repository::open(&git.root()).unwrap();
let head = repo.head().unwrap().target().unwrap();
- repo.tag("v0.1.0",
- &repo.find_object(head, None).unwrap(),
- &repo.signature().unwrap(),
- "make a new tag",
- false).unwrap();
+ repo.tag(
+ "v0.1.0",
+ &repo.find_object(head, None).unwrap(),
+ &repo.signature().unwrap(),
+ "make a new tag",
+ false,
+ ).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
git1 = {{ git = '{0}', rev = 'v0.1.0' }}
git2 = {{ git = '{0}', rev = 'v0.1.0' }}
- "#, git.url()))
+ "#,
+ git.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
#[ignore] // accesses crates.io
fn include_overrides_gitignore() {
let p = git::new("reduction", |repo| {
- repo.file("Cargo.toml", r#"
+ repo.file(
+ "Cargo.toml",
+ r#"
[package]
name = "reduction"
version = "0.5.0"
[build-dependencies]
filetime = "0.1"
- "#)
- .file(".gitignore", r#"
+ "#,
+ ).file(
+ ".gitignore",
+ r#"
target
Cargo.lock
# Below files represent generated code, thus not managed by `git`
src/incl.rs
src/not_incl.rs
- "#)
- .file("tango-build.rs", r#"
+ "#,
+ )
+ .file(
+ "tango-build.rs",
+ r#"
extern crate filetime;
use filetime::FileTime;
use std::fs::{self, File};
filetime::set_file_times(file, atime, mtime).unwrap();
}
}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
mod not_incl;
mod incl;
- "#)
- .file("src/mod.md", r#"
+ "#,
+ )
+ .file(
+ "src/mod.md",
+ r#"
(The content of this file does not matter since we are not doing real codegen.)
- "#)
+ "#,
+ )
}).unwrap();
println!("build 1: all is new");
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] filetime [..]
[DOWNLOADING] libc [..]
[RUNNING] `[..][/]build-script-tango-build`
[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
println!("build 2: nothing changed; file timestamps reset by build script");
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] libc [..]
[FRESH] filetime [..]
[FRESH] reduction [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
println!("build 3: touch `src/not_incl.rs`; expect build script *not* re-run");
sleep_ms(1000);
File::create(p.root().join("src").join("not_incl.rs")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] libc [..]
[FRESH] filetime [..]
[COMPILING] reduction [..]
[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
// This final case models the bug from rust-lang/cargo#4135: an
// explicitly included file should cause a build-script re-run,
sleep_ms(1000);
File::create(p.root().join("src").join("incl.rs")).unwrap();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[FRESH] libc [..]
[FRESH] filetime [..]
[COMPILING] reduction [..]
[RUNNING] `[..][/]build-script-tango-build`
[RUNNING] `rustc --crate-name reduction src[/]lib.rs --crate-type lib [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
let project = project("foo");
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "dep1"
[lib]
name = "dep1"
- "#)
- .file("src/dep1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/dep1.rs",
+ r#"
pub fn hello() -> &'static str {
"hello world"
}
- "#)
+ "#,
+ )
}).unwrap();
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
[dependencies.dep1]
git = '{}'
- "#, git_project.url()))
- .file("src/main.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"]))
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+ )
.build();
let git_root = git_project.root();
- assert_that(project.cargo("build"),
- execs()
- .with_stderr(&format!("[UPDATING] git repository `{}`\n\
- error: failed to load source for a dependency on `dep1`\n\
- \n\
- Caused by:\n \
- Unable to update {}\n\
- \n\
- Caused by:\n \
- failed to parse manifest at `[..]`\n\
- \n\
- Caused by:\n \
- could not parse input as TOML\n\
- \n\
- Caused by:\n \
- duplicate key: `categories` for key `project`",
- path2url(git_root.clone()),
- path2url(git_root),
- )));
+ assert_that(
+ project.cargo("build"),
+ execs().with_stderr(&format!(
+ "[UPDATING] git repository `{}`\n\
+ error: failed to load source for a dependency on `dep1`\n\
+ \n\
+ Caused by:\n \
+ Unable to update {}\n\
+ \n\
+ Caused by:\n \
+ failed to parse manifest at `[..]`\n\
+ \n\
+ Caused by:\n \
+ could not parse input as TOML\n\
+ \n\
+ Caused by:\n \
+ duplicate key: `categories` for key `project`",
+ path2url(git_root.clone()),
+ path2url(git_root),
+ )),
+ );
}
#[test]
fn failed_submodule_checkout() {
let project = project("foo");
let git_project = git::new("dep1", |project| {
- project
- .file("Cargo.toml", r#"
+ project.file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep1"
version = "0.5.0"
authors = [""]
- "#)
+ "#,
+ )
}).unwrap();
- let git_project2 = git::new("dep2", |project| {
- project.file("lib.rs", "")
- }).unwrap();
+ let git_project2 = git::new("dep2", |project| project.file("lib.rs", "")).unwrap();
let listener = TcpListener::bind("127.0.0.1:0").unwrap();
let addr = listener.local_addr().unwrap();
drop(repo);
let project = project
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
dep1 = {{ git = '{}' }}
- "#, git_project.url()))
+ "#,
+ git_project.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(project.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains(" failed to update submodule `src`")
- .with_stderr_contains(" failed to update submodule `bar`"));
- assert_that(project.cargo("build"),
- execs().with_status(101)
- .with_stderr_contains(" failed to update submodule `src`")
- .with_stderr_contains(" failed to update submodule `bar`"));
+ assert_that(
+ project.cargo("build"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(" failed to update submodule `src`")
+ .with_stderr_contains(" failed to update submodule `bar`"),
+ );
+ assert_that(
+ project.cargo("build"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(" failed to update submodule `src`")
+ .with_stderr_contains(" failed to update submodule `bar`"),
+ );
done.store(true, Ordering::SeqCst);
drop(TcpStream::connect(&addr));
#[derive(Debug)]
pub struct ExistingFile;
-impl<P> Matcher<P> for ExistingFile where P: AsRef<Path> {
+impl<P> Matcher<P> for ExistingFile
+where
+ P: AsRef<Path>,
+{
fn matches(&self, actual: P) -> Result<(), String> {
if actual.as_ref().is_file() {
Ok(())
#[derive(Debug)]
pub struct ExistingDir;
-impl<P> Matcher<P> for ExistingDir where P: AsRef<Path> {
+impl<P> Matcher<P> for ExistingDir
+where
+ P: AsRef<Path>,
+{
fn matches(&self, actual: P) -> Result<(), String> {
if actual.as_ref().is_dir() {
Ok(())
}
pub fn is_not<T, M: Matcher<T>>(matcher: M) -> IsNot<T, M> {
- IsNot { matcher, _marker: marker::PhantomData }
+ IsNot {
+ matcher,
+ _marker: marker::PhantomData,
+ }
}
#[derive(Debug)]
_marker: marker::PhantomData<T>,
}
-impl<T, M: Matcher<T>> Matcher<T> for IsNot<T, M> where T: fmt::Debug {
+impl<T, M: Matcher<T>> Matcher<T> for IsNot<T, M>
+where
+ T: fmt::Debug,
+{
fn matches(&self, actual: T) -> Result<(), String> {
match self.matcher.matches(actual) {
Ok(_) => Err("matched".to_string()),
#[derive(Debug)]
pub struct Contains<T>(Vec<T>);
-impl<'a, T> Matcher<&'a Vec<T>> for Contains<T> where T: fmt::Debug + PartialEq {
+impl<'a, T> Matcher<&'a Vec<T>> for Contains<T>
+where
+ T: fmt::Debug + PartialEq,
+{
fn matches(&self, actual: &'a Vec<T>) -> Result<(), String> {
for item in self.0.iter() {
if !actual.contains(item) {
- return Err(format!("failed to find {:?}", item))
+ return Err(format!("failed to find {:?}", item));
}
}
Ok(())
use std::env;
use cargo::util::ProcessBuilder;
-use cargotest::support::{execs, paths, cargo_exe};
-use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+use cargotest::support::{cargo_exe, execs, paths};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
use tempdir::TempDir;
fn cargo_process(s: &str) -> ProcessBuilder {
#[test]
fn simple_lib() {
- assert_that(cargo_process("init").arg("--lib").arg("--vcs").arg("none")
- .env("USER", "foo"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo_process("init")
+ .arg("--lib")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo"),
+ execs().with_status(0).with_stderr(
+ "\
[CREATED] library project
-"));
+",
+ ),
+ );
assert_that(&paths::root().join("Cargo.toml"), existing_file());
assert_that(&paths::root().join("src/lib.rs"), existing_file());
assert_that(&paths::root().join(".gitignore"), is_not(existing_file()));
- assert_that(cargo_process("build"),
- execs().with_status(0));
+ assert_that(cargo_process("build"), execs().with_status(0));
}
#[test]
fn simple_bin() {
let path = paths::root().join("foo");
fs::create_dir(&path).unwrap();
- assert_that(cargo_process("init").arg("--bin").arg("--vcs").arg("none")
- .env("USER", "foo").cwd(&path),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo_process("init")
+ .arg("--bin")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo")
+ .cwd(&path),
+ execs().with_status(0).with_stderr(
+ "\
[CREATED] binary (application) project
-"));
+",
+ ),
+ );
assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
assert_that(&paths::root().join("foo/src/main.rs"), existing_file());
- assert_that(cargo_process("build").cwd(&path),
- execs().with_status(0));
- assert_that(&paths::root().join(&format!("foo/target/debug/foo{}",
- env::consts::EXE_SUFFIX)),
- existing_file());
+ assert_that(cargo_process("build").cwd(&path), execs().with_status(0));
+ assert_that(
+ &paths::root().join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)),
+ existing_file(),
+ );
}
#[test]
fn both_lib_and_bin() {
let td = TempDir::new("cargo").unwrap();
- assert_that(cargo_process("init").arg("--lib").arg("--bin").cwd(td.path())
- .env("USER", "foo"),
- execs().with_status(101).with_stderr(
- "[ERROR] can't specify both lib and binary outputs"));
+ assert_that(
+ cargo_process("init")
+ .arg("--lib")
+ .arg("--bin")
+ .cwd(td.path())
+ .env("USER", "foo"),
+ execs()
+ .with_status(101)
+ .with_stderr("[ERROR] can't specify both lib and binary outputs"),
+ );
}
fn bin_already_exists(explicit: bool, rellocation: &str) {
}
"#;
- File::create(&sourcefile_path).unwrap().write_all(content).unwrap();
+ File::create(&sourcefile_path)
+ .unwrap()
+ .write_all(content)
+ .unwrap();
if explicit {
- assert_that(cargo_process("init").arg("--bin").arg("--vcs").arg("none")
- .env("USER", "foo").cwd(&path),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--bin")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo")
+ .cwd(&path),
+ execs().with_status(0),
+ );
} else {
- assert_that(cargo_process("init").arg("--vcs").arg("none")
- .env("USER", "foo").cwd(&path),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo")
+ .cwd(&path),
+ execs().with_status(0),
+ );
}
assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
- assert_that(&paths::root().join("foo/src/lib.rs"), is_not(existing_file()));
+ assert_that(
+ &paths::root().join("foo/src/lib.rs"),
+ is_not(existing_file()),
+ );
// Check that our file is not overwritten
let mut new_content = Vec::new();
- File::open(&sourcefile_path).unwrap().read_to_end(&mut new_content).unwrap();
+ File::open(&sourcefile_path)
+ .unwrap()
+ .read_to_end(&mut new_content)
+ .unwrap();
assert_eq!(Vec::from(content as &[u8]), new_content);
}
let sourcefile_path1 = path.join("src/lib.rs");
- File::create(&sourcefile_path1).unwrap().write_all(br#"
+ File::create(&sourcefile_path1)
+ .unwrap()
+ .write_all(
+ br#"
fn qqq () {
println!("Hello, world 2!");
}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let sourcefile_path2 = path.join("lib.rs");
- File::create(&sourcefile_path2).unwrap().write_all(br#"
+ File::create(&sourcefile_path2)
+ .unwrap()
+ .write_all(
+ br#"
fn qqq () {
println!("Hello, world 3!");
}
- "#).unwrap();
-
- assert_that(cargo_process("init").arg("--vcs").arg("none")
- .env("USER", "foo").cwd(&path),
- execs().with_status(101).with_stderr("\
+ "#,
+ )
+ .unwrap();
+
+ assert_that(
+ cargo_process("init")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo")
+ .cwd(&path),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] cannot have a project with multiple libraries, found both `src/lib.rs` and `lib.rs`
-"));
+",
+ ),
+ );
- assert_that(&paths::root().join("foo/Cargo.toml"), is_not(existing_file()));
+ assert_that(
+ &paths::root().join("foo/Cargo.toml"),
+ is_not(existing_file()),
+ );
}
-
#[test]
fn multibin_project_name_clash() {
let path = paths::root().join("foo");
let sourcefile_path1 = path.join("foo.rs");
- File::create(&sourcefile_path1).unwrap().write_all(br#"
+ File::create(&sourcefile_path1)
+ .unwrap()
+ .write_all(
+ br#"
fn main () {
println!("Hello, world 2!");
}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let sourcefile_path2 = path.join("main.rs");
- File::create(&sourcefile_path2).unwrap().write_all(br#"
+ File::create(&sourcefile_path2)
+ .unwrap()
+ .write_all(
+ br#"
fn main () {
println!("Hello, world 3!");
}
- "#).unwrap();
-
- assert_that(cargo_process("init").arg("--lib").arg("--vcs").arg("none")
- .env("USER", "foo").cwd(&path),
- execs().with_status(101).with_stderr("\
+ "#,
+ )
+ .unwrap();
+
+ assert_that(
+ cargo_process("init")
+ .arg("--lib")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo")
+ .cwd(&path),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] multiple possible binary sources found:
main.rs
foo.rs
cannot automatically generate Cargo.toml as the main target would be ambiguous
-"));
+",
+ ),
+ );
- assert_that(&paths::root().join("foo/Cargo.toml"), is_not(existing_file()));
+ assert_that(
+ &paths::root().join("foo/Cargo.toml"),
+ is_not(existing_file()),
+ );
}
fn lib_already_exists(rellocation: &str) {
pub fn qqq() {}
"#;
- File::create(&sourcefile_path).unwrap().write_all(content).unwrap();
+ File::create(&sourcefile_path)
+ .unwrap()
+ .write_all(content)
+ .unwrap();
- assert_that(cargo_process("init").arg("--vcs").arg("none")
- .env("USER", "foo").cwd(&path),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo")
+ .cwd(&path),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
- assert_that(&paths::root().join("foo/src/main.rs"), is_not(existing_file()));
+ assert_that(
+ &paths::root().join("foo/src/main.rs"),
+ is_not(existing_file()),
+ );
// Check that our file is not overwritten
let mut new_content = Vec::new();
- File::open(&sourcefile_path).unwrap().read_to_end(&mut new_content).unwrap();
+ File::open(&sourcefile_path)
+ .unwrap()
+ .read_to_end(&mut new_content)
+ .unwrap();
assert_eq!(Vec::from(content as &[u8]), new_content);
}
#[test]
fn simple_git() {
- assert_that(cargo_process("init").arg("--lib")
- .arg("--vcs")
- .arg("git")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--lib")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join("Cargo.toml"), existing_file());
assert_that(&paths::root().join("src/lib.rs"), existing_file());
let td = TempDir::new("cargo").unwrap();
let foo = &td.path().join("foo");
fs::create_dir_all(&foo).unwrap();
- assert_that(cargo_process("init").arg("--lib")
- .cwd(foo.clone())
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--lib")
+ .cwd(foo.clone())
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&foo.join("Cargo.toml"), existing_file());
assert_that(&foo.join("src/lib.rs"), existing_file());
fn invalid_dir_name() {
let foo = &paths::root().join("foo.bar");
fs::create_dir_all(&foo).unwrap();
- assert_that(cargo_process("init").cwd(foo.clone())
- .env("USER", "foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("init").cwd(foo.clone()).env("USER", "foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Invalid character `.` in crate name: `foo.bar`
use --name to override crate name
-"));
+",
+ ),
+ );
assert_that(&foo.join("Cargo.toml"), is_not(existing_file()));
}
fn reserved_name() {
let test = &paths::root().join("test");
fs::create_dir_all(&test).unwrap();
- assert_that(cargo_process("init").cwd(test.clone())
- .env("USER", "foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("init").cwd(test.clone()).env("USER", "foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] The name `test` cannot be used as a crate name\n\
use --name to override crate name
-"));
+",
+ ),
+ );
assert_that(&test.join("Cargo.toml"), is_not(existing_file()));
}
fn git_autodetect() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- assert_that(cargo_process("init").arg("--lib")
- .env("USER", "foo"),
- execs().with_status(0));
-
+ assert_that(
+ cargo_process("init").arg("--lib").env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join("Cargo.toml"), existing_file());
assert_that(&paths::root().join("src/lib.rs"), existing_file());
assert_that(&paths::root().join(".gitignore"), existing_file());
}
-
#[test]
fn mercurial_autodetect() {
fs::create_dir(&paths::root().join(".hg")).unwrap();
- assert_that(cargo_process("init").arg("--lib")
- .env("USER", "foo"),
- execs().with_status(0));
-
+ assert_that(
+ cargo_process("init").arg("--lib").env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join("Cargo.toml"), existing_file());
assert_that(&paths::root().join("src/lib.rs"), existing_file());
fn gitignore_appended_not_replaced() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- File::create(&paths::root().join(".gitignore")).unwrap().write_all(b"qqqqqq\n").unwrap();
-
- assert_that(cargo_process("init").arg("--lib")
- .env("USER", "foo"),
- execs().with_status(0));
+ File::create(&paths::root().join(".gitignore"))
+ .unwrap()
+ .write_all(b"qqqqqq\n")
+ .unwrap();
+ assert_that(
+ cargo_process("init").arg("--lib").env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join("Cargo.toml"), existing_file());
assert_that(&paths::root().join("src/lib.rs"), existing_file());
assert_that(&paths::root().join(".gitignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".gitignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"qqqqqq"#));
}
fn gitignore_added_newline_if_required() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- File::create(&paths::root().join(".gitignore")).unwrap().write_all(b"first").unwrap();
+ File::create(&paths::root().join(".gitignore"))
+ .unwrap()
+ .write_all(b"first")
+ .unwrap();
- assert_that(cargo_process("init").arg("--lib")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init").arg("--lib").env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join(".gitignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".gitignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.starts_with("first\n"));
}
fn mercurial_added_newline_if_required() {
fs::create_dir(&paths::root().join(".hg")).unwrap();
- File::create(&paths::root().join(".hgignore")).unwrap().write_all(b"first").unwrap();
+ File::create(&paths::root().join(".hgignore"))
+ .unwrap()
+ .write_all(b"first")
+ .unwrap();
- assert_that(cargo_process("init").arg("--lib")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init").arg("--lib").env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join(".hgignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".hgignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".hgignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.starts_with("first\n"));
}
fn cargo_lock_gitignored_if_lib1() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- assert_that(cargo_process("init").arg("--lib").arg("--vcs").arg("git")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--lib")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join(".gitignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".gitignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"Cargo.lock"#));
}
fn cargo_lock_gitignored_if_lib2() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- File::create(&paths::root().join("lib.rs")).unwrap().write_all(br#""#).unwrap();
+ File::create(&paths::root().join("lib.rs"))
+ .unwrap()
+ .write_all(br#""#)
+ .unwrap();
- assert_that(cargo_process("init").arg("--vcs").arg("git")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join(".gitignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".gitignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"Cargo.lock"#));
}
fn cargo_lock_not_gitignored_if_bin1() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- assert_that(cargo_process("init").arg("--vcs").arg("git")
- .arg("--bin")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--vcs")
+ .arg("git")
+ .arg("--bin")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join(".gitignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".gitignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(!contents.contains(r#"Cargo.lock"#));
}
fn cargo_lock_not_gitignored_if_bin2() {
fs::create_dir(&paths::root().join(".git")).unwrap();
- File::create(&paths::root().join("main.rs")).unwrap().write_all(br#""#).unwrap();
+ File::create(&paths::root().join("main.rs"))
+ .unwrap()
+ .write_all(br#""#)
+ .unwrap();
- assert_that(cargo_process("init").arg("--vcs").arg("git")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join(".gitignore"), existing_file());
let mut contents = String::new();
- File::open(&paths::root().join(".gitignore")).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&paths::root().join(".gitignore"))
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(!contents.contains(r#"Cargo.lock"#));
}
#[test]
fn with_argument() {
- assert_that(cargo_process("init").arg("foo").arg("--vcs").arg("none")
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("init")
+ .arg("foo")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
}
-
#[test]
fn unknown_flags() {
- assert_that(cargo_process("init").arg("foo").arg("--flag"),
- execs().with_status(1)
- .with_stderr_contains("\
+ assert_that(
+ cargo_process("init").arg("foo").arg("--flag"),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: Found argument '--flag' which wasn't expected, or isn't valid in this context
-"));
+",
+ ),
+ );
}
#[cfg(not(windows))]
#[test]
fn no_filename() {
- assert_that(cargo_process("init").arg("/"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ cargo_process("init").arg("/"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] cannot auto-detect project name from path \"/\" ; use --name to override
-".to_string()));
+"
+ .to_string(),
+ ),
+ );
}
use cargotest::support::git;
use cargotest::support::paths;
use cargotest::support::registry::Package;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::{assert_that, existing_dir, is_not};
fn cargo_process(s: &str) -> ProcessBuilder {
fn pkg(name: &str, vers: &str) {
Package::new(name, vers)
.file("src/lib.rs", "")
- .file("src/main.rs", &format!("
+ .file(
+ "src/main.rs",
+ &format!(
+ "
extern crate {};
fn main() {{}}
- ", name))
+ ",
+ name
+ ),
+ )
.publish();
}
fn simple() {
pkg("foo", "0.0.1");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install").arg("foo"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.0.1 (registry [..])
[INSTALLING] foo v0.0.1
[INSTALLING] {home}[..]bin[..]foo[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(cargo_process("uninstall").arg("foo"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("uninstall").arg("foo"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[REMOVING] {home}[..]bin[..]foo[..]
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
}
pkg("foo", "0.0.1");
pkg("bar", "0.0.2");
- assert_that(cargo_process("install").args(&["foo", "bar", "baz"]),
- execs().with_status(101).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install").args(&["foo", "bar", "baz"]),
+ execs().with_status(101).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.0.1 (registry `file://[..]`)
[INSTALLING] foo v0.0.1
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
error: some crates failed to install
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
assert_that(cargo_home(), has_installed_exe("bar"));
- assert_that(cargo_process("uninstall").args(&["foo", "bar"]),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("uninstall").args(&["foo", "bar"]),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[REMOVING] {home}[..]bin[..]foo[..]
[REMOVING] {home}[..]bin[..]bar[..]
[SUMMARY] Successfully uninstalled foo, bar!
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
assert_that(cargo_home(), is_not(has_installed_exe("bar")));
pkg("foo", "0.0.1");
pkg("foo", "0.0.2");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install").arg("foo"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.0.2 (registry [..])
[INSTALLING] foo v0.0.2
[INSTALLING] {home}[..]bin[..]foo[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
#[test]
fn missing() {
pkg("foo", "0.0.1");
- assert_that(cargo_process("install").arg("bar"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install").arg("bar"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[ERROR] could not find `bar` in registry `[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn bad_version() {
pkg("foo", "0.0.1");
- assert_that(cargo_process("install").arg("foo").arg("--vers=0.2.0"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install").arg("foo").arg("--vers=0.2.0"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0`
-"));
+",
+ ),
+ );
}
#[test]
fn no_crate() {
- assert_that(cargo_process("install"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] `[..]` is not a crate root; specify a crate to install [..]
Caused by:
Caused by:
[..] (os error [..])
-"));
+",
+ ),
+ );
}
#[test]
let t4 = cargo_home();
fs::create_dir(root.join(".cargo")).unwrap();
- File::create(root.join(".cargo/config")).unwrap().write_all(format!("\
+ File::create(root.join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ format!(
+ "\
[install]
root = '{}'
- ", t3.display()).as_bytes()).unwrap();
+ ",
+ t3.display()
+ ).as_bytes(),
+ )
+ .unwrap();
println!("install --root");
- assert_that(cargo_process("install").arg("foo")
- .arg("--root").arg(&t1)
- .env("CARGO_INSTALL_ROOT", &t2),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("foo")
+ .arg("--root")
+ .arg(&t1)
+ .env("CARGO_INSTALL_ROOT", &t2),
+ execs().with_status(0),
+ );
assert_that(&t1, has_installed_exe("foo"));
assert_that(&t2, is_not(has_installed_exe("foo")));
println!("install CARGO_INSTALL_ROOT");
- assert_that(cargo_process("install").arg("foo")
- .env("CARGO_INSTALL_ROOT", &t2),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("foo")
+ .env("CARGO_INSTALL_ROOT", &t2),
+ execs().with_status(0),
+ );
assert_that(&t2, has_installed_exe("foo"));
assert_that(&t3, is_not(has_installed_exe("foo")));
println!("install install.root");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
assert_that(&t3, has_installed_exe("foo"));
assert_that(&t4, is_not(has_installed_exe("foo")));
println!("install cargo home");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
assert_that(&t4, has_installed_exe("foo"));
}
#[test]
fn install_path() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(cargo_process("install").arg("--path").arg(".").cwd(p.root()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install")
+ .arg("--path")
+ .arg(".")
+ .cwd(p.root()),
+ execs().with_status(101).with_stderr(
+ "\
[INSTALLING] foo v0.1.0 [..]
[ERROR] binary `foo[..]` already exists in destination as part of `foo v0.1.0 [..]`
Add --force to overwrite
-"));
+",
+ ),
+ );
}
#[test]
fn multiple_crates_error() {
let p = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install")
+ .arg("--git")
+ .arg(p.url().to_string()),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] git repository [..]
[ERROR] multiple packages with binaries found: bar, foo
-"));
+",
+ ),
+ );
}
#[test]
fn multiple_crates_select() {
let p = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--git").arg(p.url().to_string())
- .arg("foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("--git")
+ .arg(p.url().to_string())
+ .arg("foo"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
assert_that(cargo_home(), is_not(has_installed_exe("bar")));
- assert_that(cargo_process("install").arg("--git").arg(p.url().to_string())
- .arg("bar"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("--git")
+ .arg(p.url().to_string())
+ .arg("bar"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("bar"));
}
#[test]
fn multiple_crates_auto_binaries() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "a" }
- "#)
+ "#,
+ )
.file("src/main.rs", "extern crate bar; fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
#[test]
fn multiple_crates_auto_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "extern crate bar;")
- .file("examples/foo.rs", "
+ .file(
+ "examples/foo.rs",
+ "
extern crate bar;
extern crate foo;
fn main() {}
- ")
- .file("a/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root())
- .arg("--example=foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("--path")
+ .arg(p.root())
+ .arg("--example=foo"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
#[test]
fn no_binaries_or_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no packages found with binaries or examples
-"));
+",
+ ),
+ );
}
#[test]
fn no_binaries() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/foo.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install")
+ .arg("--path")
+ .arg(p.root())
+ .arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[INSTALLING] foo [..]
[ERROR] specified package has no binaries
-"));
+",
+ ),
+ );
}
#[test]
fn examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/foo.rs", "extern crate foo; fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root())
- .arg("--example=foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("--path")
+ .arg(p.root())
+ .arg("--example=foo"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
#[test]
fn install_twice() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo-bin1.rs", "fn main() {}")
.file("src/bin/foo-bin2.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(101).with_stderr(
+ "\
[INSTALLING] foo v0.1.0 [..]
[ERROR] binary `foo-bin1[..]` already exists in destination as part of `foo v0.1.0 ([..])`
binary `foo-bin2[..]` already exists in destination as part of `foo v0.1.0 ([..])`
Add --force to overwrite
-"));
+",
+ ),
+ );
}
#[test]
fn install_force() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
let p = project("foo2")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--force").arg("--path").arg(p.root()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install")
+ .arg("--force")
+ .arg("--path")
+ .arg(p.root()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] {home}[..]bin[..]foo[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
- home = cargo_home().display())));
-
- assert_that(cargo_process("install").arg("--list"),
- execs().with_status(0).with_stdout("\
+ home = cargo_home().display()
+ )),
+ );
+
+ assert_that(
+ cargo_process("install").arg("--list"),
+ execs().with_status(0).with_stdout(
+ "\
foo v0.2.0 ([..]):
foo[..]
-"));
+",
+ ),
+ );
}
#[test]
fn install_force_partial_overlap() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo-bin1.rs", "fn main() {}")
.file("src/bin/foo-bin2.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
let p = project("foo2")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo-bin2.rs", "fn main() {}")
.file("src/bin/foo-bin3.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--force").arg("--path").arg(p.root()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install")
+ .arg("--force")
+ .arg("--path")
+ .arg(p.root()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] {home}[..]bin[..]foo-bin2[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
- home = cargo_home().display())));
-
- assert_that(cargo_process("install").arg("--list"),
- execs().with_status(0).with_stdout("\
+ home = cargo_home().display()
+ )),
+ );
+
+ assert_that(
+ cargo_process("install").arg("--list"),
+ execs().with_status(0).with_stdout(
+ "\
foo v0.1.0 ([..]):
foo-bin1[..]
foo v0.2.0 ([..]):
foo-bin2[..]
foo-bin3[..]
-"));
+",
+ ),
+ );
}
#[test]
fn install_force_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo-bin1.rs", "fn main() {}")
.file("src/bin/foo-bin2.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
let p = project("foo2")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo-bin1.rs", "fn main() {}")
.file("src/bin/foo-bin2.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--force")
- .arg("--bin")
- .arg("foo-bin2")
- .arg("--path")
- .arg(p.root()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install")
+ .arg("--force")
+ .arg("--bin")
+ .arg("foo-bin2")
+ .arg("--path")
+ .arg(p.root()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] {home}[..]bin[..]foo-bin2[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
- home = cargo_home().display())));
-
- assert_that(cargo_process("install").arg("--list"),
- execs().with_status(0).with_stdout("\
+ home = cargo_home().display()
+ )),
+ );
+
+ assert_that(
+ cargo_process("install").arg("--list"),
+ execs().with_status(0).with_stdout(
+ "\
foo v0.1.0 ([..]):
foo-bin1[..]
foo v0.2.0 ([..]):
foo-bin2[..]
-"));
+",
+ ),
+ );
}
#[test]
fn compile_failure() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(101).with_stderr_contains(
+ "\
[ERROR] failed to compile `foo v0.1.0 ([..])`, intermediate artifacts can be \
found at `[..]target`
Could not compile `foo`.
To learn more, run the command again with --verbose.
-"));
+",
+ ),
+ );
}
#[test]
fn git_repo() {
let p = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
// use `--locked` to test that we don't even try to write a lockfile
- assert_that(cargo_process("install").arg("--locked").arg("--git").arg(p.url().to_string()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ cargo_process("install")
+ .arg("--locked")
+ .arg("--git")
+ .arg(p.url().to_string()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] git repository `[..]`
[INSTALLING] foo v0.1.0 ([..])
[COMPILING] foo v0.1.0 ([..])
[INSTALLING] {home}[..]bin[..]foo[..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
assert_that(cargo_home(), has_installed_exe("foo"));
}
pkg("bar", "0.2.1");
pkg("bar", "0.2.2");
- assert_that(cargo_process("install").arg("--list"),
- execs().with_status(0).with_stdout(""));
-
- assert_that(cargo_process("install").arg("bar").arg("--vers").arg("=0.2.1"),
- execs().with_status(0));
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
- assert_that(cargo_process("install").arg("--list"),
- execs().with_status(0).with_stdout("\
+ assert_that(
+ cargo_process("install").arg("--list"),
+ execs().with_status(0).with_stdout(""),
+ );
+
+ assert_that(
+ cargo_process("install")
+ .arg("bar")
+ .arg("--vers")
+ .arg("=0.2.1"),
+ execs().with_status(0),
+ );
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--list"),
+ execs().with_status(0).with_stdout(
+ "\
bar v0.2.1:
bar[..]
foo v0.0.1:
foo[..]
-"));
+",
+ ),
+ );
}
#[test]
fn list_error() {
pkg("foo", "0.0.1");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
- assert_that(cargo_process("install").arg("--list"),
- execs().with_status(0).with_stdout("\
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--list"),
+ execs().with_status(0).with_stdout(
+ "\
foo v0.0.1:
foo[..]
-"));
+",
+ ),
+ );
let mut worldfile_path = cargo_home();
worldfile_path.push(".crates.toml");
let mut worldfile = OpenOptions::new()
- .write(true)
- .open(worldfile_path)
- .expect(".crates.toml should be there");
+ .write(true)
+ .open(worldfile_path)
+ .expect(".crates.toml should be there");
worldfile.write_all(b"\x00").unwrap();
drop(worldfile);
- assert_that(cargo_process("install").arg("--list").arg("--verbose"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("install").arg("--list").arg("--verbose"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse crate metadata at `[..]`
Caused by:
Caused by:
unexpected character[..]
-"));
+",
+ ),
+ );
}
#[test]
fn uninstall_pkg_does_not_exist() {
- assert_that(cargo_process("uninstall").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("uninstall").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] package id specification `foo` matched no packages
-"));
+",
+ ),
+ );
}
#[test]
fn uninstall_bin_does_not_exist() {
pkg("foo", "0.0.1");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
- assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"),
- execs().with_status(101).with_stderr("\
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
+ assert_that(
+ cargo_process("uninstall").arg("foo").arg("--bin=bar"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`
-"));
+",
+ ),
+ );
}
#[test]
fn uninstall_piecemeal() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
assert_that(cargo_home(), has_installed_exe("bar"));
- assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo_process("uninstall").arg("foo").arg("--bin=bar"),
+ execs().with_status(0).with_stderr(
+ "\
[REMOVING] [..]bar[..]
-"));
+",
+ ),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
assert_that(cargo_home(), is_not(has_installed_exe("bar")));
- assert_that(cargo_process("uninstall").arg("foo").arg("--bin=foo"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo_process("uninstall").arg("foo").arg("--bin=foo"),
+ execs().with_status(0).with_stderr(
+ "\
[REMOVING] [..]foo[..]
-"));
+",
+ ),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
- assert_that(cargo_process("uninstall").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ cargo_process("uninstall").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] package id specification `foo` matched no packages
-"));
+",
+ ),
+ );
}
#[test]
fn subcommand_works_out_of_the_box() {
Package::new("cargo-foo", "1.0.0")
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
println!("bar");
}
- "#)
+ "#,
+ )
.publish();
- assert_that(cargo_process("install").arg("cargo-foo"),
- execs().with_status(0));
- assert_that(cargo_process("foo"),
- execs().with_status(0).with_stdout("bar\n"));
- assert_that(cargo_process("--list"),
- execs().with_status(0).with_stdout_contains(" foo\n"));
+ assert_that(
+ cargo_process("install").arg("cargo-foo"),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process("foo"),
+ execs().with_status(0).with_stdout("bar\n"),
+ );
+ assert_that(
+ cargo_process("--list"),
+ execs().with_status(0).with_stdout_contains(" foo\n"),
+ );
}
#[test]
fn installs_from_cwd_by_default() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").cwd(p.root()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").cwd(p.root()),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
#[test]
fn do_not_rebuilds_on_local_install() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0));
- assert_that(cargo_process("install").arg("--path").arg(p.root()),
- execs().with_status(0).with_stderr("[INSTALLING] [..]
+ assert_that(p.cargo("build").arg("--release"), execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("--path").arg(p.root()),
+ execs().with_status(0).with_stderr(
+ "[INSTALLING] [..]
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [..]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
-"));
+",
+ ),
+ );
assert!(p.build_dir().exists());
assert!(p.release_bin("foo").exists());
#[test]
fn reports_unsuccessful_subcommand_result() {
Package::new("cargo-fail", "1.0.0")
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
panic!();
}
- "#)
+ "#,
+ )
.publish();
- assert_that(cargo_process("install").arg("cargo-fail"),
- execs().with_status(0));
- assert_that(cargo_process("--list"),
- execs().with_status(0).with_stdout_contains(" fail\n"));
- assert_that(cargo_process("fail"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ cargo_process("install").arg("cargo-fail"),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process("--list"),
+ execs().with_status(0).with_stdout_contains(" fail\n"),
+ );
+ assert_that(
+ cargo_process("fail"),
+ execs().with_status(101).with_stderr_contains(
+ "\
thread '[..]' panicked at 'explicit panic', [..]
-"));
+",
+ ),
+ );
}
#[test]
fn git_with_lockfile() {
let p = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "fn main() {}")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "foo"
version = "0.1.0"
[[package]]
name = "bar"
version = "0.1.0"
- "#)
+ "#,
+ )
.build();
- assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install")
+ .arg("--git")
+ .arg(p.url().to_string()),
+ execs().with_status(0),
+ );
}
#[test]
fn q_silences_warnings() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install").arg("-q").arg("--path").arg(p.root()),
- execs().with_status(0).with_stderr(""));
+ assert_that(
+ cargo_process("install")
+ .arg("-q")
+ .arg("--path")
+ .arg(p.root()),
+ execs().with_status(0).with_stderr(""),
+ );
}
#[test]
perms.set_readonly(true);
fs::set_permissions(dir, perms).unwrap();
- assert_that(cargo_process("install").arg("foo").cwd(dir),
- execs().with_status(0));
+ assert_that(
+ cargo_process("install").arg("foo").cwd(dir),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
}
fn use_path_workspace() {
Package::new("foo", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[workspace]
members = ["baz"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.1.0"
[dependencies]
foo = "1"
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "")
.build();
fn dev_dependencies_no_check() {
Package::new("foo", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dev-dependencies]
baz = "1.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
// --bins is needed because of #5134
- assert_that(p.cargo("build").arg("--bins"),
- execs().with_status(101));
- assert_that(p.cargo("install").arg("--bins"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--bins"), execs().with_status(101));
+ assert_that(p.cargo("install").arg("--bins"), execs().with_status(0));
}
#[test]
fn dev_dependencies_lock_file_untouched() {
Package::new("foo", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dev-dependencies]
bar = { path = "a" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
// --bins is needed because of #5134
- assert_that(p.cargo("build").arg("--bins"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--bins"), execs().with_status(0));
let lock = p.read_lockfile();
- assert_that(p.cargo("install").arg("--bins"),
- execs().with_status(0));
+ assert_that(p.cargo("install").arg("--bins"), execs().with_status(0));
let lock2 = p.read_lockfile();
assert!(lock == lock2, "different lockfiles");
}
pkg("foo", "0.1.1");
pkg("foo", "0.1.2");
- assert_that(cargo_process("install").arg("foo").arg("--vers").arg("0.1.1"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ cargo_process("install")
+ .arg("foo")
+ .arg("--vers")
+ .arg("0.1.1"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[DOWNLOADING] foo v0.1.1 (registry [..])
-"));
+",
+ ),
+ );
}
#[test]
pkg("foo", "0.1.1");
pkg("foo", "0.1.2");
- assert_that(cargo_process("install").arg("foo").arg("--version").arg("0.1.1"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ cargo_process("install")
+ .arg("foo")
+ .arg("--version")
+ .arg("0.1.1"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[DOWNLOADING] foo v0.1.1 (registry [..])
-"));
+",
+ ),
+ );
}
#[test]
pkg("foo", "0.1.1");
pkg("foo", "0.1.2");
- assert_that(cargo_process("install").arg("foo").arg("--version").arg("0.1.1").arg("--vers").arg("0.1.2"),
- execs().with_status(1).with_stderr_contains("\
+ assert_that(
+ cargo_process("install")
+ .arg("foo")
+ .arg("--version")
+ .arg("0.1.1")
+ .arg("--vers")
+ .arg("0.1.2"),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: The argument '--version <VERSION>' was provided more than once, \
but cannot be used multiple times
-"));
+",
+ ),
+ );
}
#[test]
fn legacy_version_requirement() {
pkg("foo", "0.1.1");
- assert_that(cargo_process("install").arg("foo").arg("--vers").arg("0.1"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ cargo_process("install").arg("foo").arg("--vers").arg("0.1"),
+ execs().with_status(0).with_stderr_contains(
+ "\
warning: the `--vers` provided, `0.1`, is not a valid semver version
historically Cargo treated this as a semver version requirement accidentally
and will continue to do so, but this behavior will be removed eventually
-"));
+",
+ ),
+ );
}
#[test]
fn uninstall_multiple_and_some_pkg_does_not_exist() {
pkg("foo", "0.0.1");
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
- assert_that(cargo_process("uninstall").args(&["foo", "bar"]),
- execs().with_status(101).with_stderr(&format!("\
+ assert_that(
+ cargo_process("uninstall").args(&["foo", "bar"]),
+ execs().with_status(101).with_stderr(&format!(
+ "\
[REMOVING] {home}[..]bin[..]foo[..]
error: package id specification `bar` matched no packages
[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above).
error: some packages failed to uninstall
",
- home = cargo_home().display())));
+ home = cargo_home().display()
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
assert_that(cargo_home(), is_not(has_installed_exe("bar")));
#[test]
fn custom_target_dir_for_git_source() {
let p = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(cargo_process("install")
- .arg("--git").arg(p.url().to_string()),
- execs().with_status(0));
- assert_that(&paths::root().join("target/release"),
- is_not(existing_dir()));
-
- assert_that(cargo_process("install").arg("--force")
- .arg("--git").arg(p.url().to_string())
- .env("CARGO_TARGET_DIR", "target"),
- execs().with_status(0));
- assert_that(&paths::root().join("target/release"),
- existing_dir());
+ assert_that(
+ cargo_process("install")
+ .arg("--git")
+ .arg(p.url().to_string()),
+ execs().with_status(0),
+ );
+ assert_that(
+ &paths::root().join("target/release"),
+ is_not(existing_dir()),
+ );
+
+ assert_that(
+ cargo_process("install")
+ .arg("--force")
+ .arg("--git")
+ .arg(p.url().to_string())
+ .env("CARGO_TARGET_DIR", "target"),
+ execs().with_status(0),
+ );
+ assert_that(&paths::root().join("target/release"), existing_dir());
}
#[test]
Package::new("foo", "0.1.0")
.dep("bar", "0.1")
.file("src/lib.rs", "")
- .file("src/main.rs", "
+ .file(
+ "src/main.rs",
+ "
extern crate foo;
extern crate bar;
fn main() {}
- ")
- .file("Cargo.lock", r#"
+ ",
+ )
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.1.0"
dependencies = [
"bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
-"#)
+"#,
+ )
.publish();
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
}
#[test]
Package::new("foo", "0.1.0")
.dep("bar", "0.1")
.file("src/lib.rs", "")
- .file("src/main.rs", "
+ .file(
+ "src/main.rs",
+ "
extern crate foo;
extern crate bar;
fn main() {}
- ")
- .file("Cargo.lock", r#"
+ ",
+ )
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.1.0"
dependencies = [
"bar 0.1.0",
]
-"#)
+"#,
+ )
.publish();
- assert_that(cargo_process("install").arg("foo"),
- execs().with_status(0));
+ assert_that(cargo_process("install").arg("foo"), execs().with_status(0));
}
use std::thread;
use std::process::Command;
-use cargotest::support::{project, execs, cargo_exe};
+use cargotest::support::{cargo_exe, execs, project};
use hamcrest::assert_that;
#[test]
fn jobserver_exists() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("build.rs", r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ r#"
use std::env;
fn main() {
fn validate(_: &str) {
// a little too complicated for a test...
}
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn makes_jobserver_used() {
- let make = if cfg!(windows) {"mingw32-make"} else {"make"};
+ let make = if cfg!(windows) {
+ "mingw32-make"
+ } else {
+ "make"
+ };
if Command::new(make).arg("--version").output().is_err() {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
d1 = { path = "d1" }
d2 = { path = "d2" }
d3 = { path = "d3" }
- "#)
- .file("src/lib.rs", "")
- .file("d1/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
build = "../dbuild.rs"
- "#)
- .file("d1/src/lib.rs", "")
- .file("d2/Cargo.toml", r#"
+ "#,
+ )
+ .file("d1/src/lib.rs", "")
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
build = "../dbuild.rs"
- "#)
- .file("d2/src/lib.rs", "")
- .file("d3/Cargo.toml", r#"
+ "#,
+ )
+ .file("d2/src/lib.rs", "")
+ .file(
+ "d3/Cargo.toml",
+ r#"
[package]
name = "d3"
version = "0.0.1"
authors = []
build = "../dbuild.rs"
- "#)
- .file("d3/src/lib.rs", "")
- .file("dbuild.rs", r#"
+ "#,
+ )
+ .file("d3/src/lib.rs", "")
+ .file(
+ "dbuild.rs",
+ r#"
use std::net::TcpStream;
use std::env;
use std::io::Read;
let mut v = Vec::new();
stream.read_to_end(&mut v).unwrap();
}
- "#)
- .file("Makefile", "\
+ "#,
+ )
+ .file(
+ "Makefile",
+ "\
all:
\t+$(CARGO) build
-")
+",
+ )
.build();
let l = TcpListener::bind("127.0.0.1:0").unwrap();
drop((a2, a3));
});
- assert_that(p.process(make)
- .env("CARGO", cargo_exe())
- .env("ADDR", addr.to_string())
- .arg("-j2"),
- execs().with_status(0));
+ assert_that(
+ p.process(make)
+ .env("CARGO", cargo_exe())
+ .env("ADDR", addr.to_string())
+ .arg("-j2"),
+ execs().with_status(0),
+ );
child.join().unwrap();
}
#[test]
fn jobserver_and_j() {
- let make = if cfg!(windows) {"mingw32-make"} else {"make"};
+ let make = if cfg!(windows) {
+ "mingw32-make"
+ } else {
+ "make"
+ };
if Command::new(make).arg("--version").output().is_err() {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Makefile", "\
+ .file(
+ "Makefile",
+ "\
all:
\t+$(CARGO) build -j2
-")
+",
+ )
.build();
- assert_that(p.process(make)
- .env("CARGO", cargo_exe())
- .arg("-j2"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.process(make).env("CARGO", cargo_exe()).arg("-j2"),
+ execs().with_status(0).with_stderr(
+ "\
warning: a `-j` argument was passed to Cargo but Cargo is also configured \
with an external jobserver in its environment, ignoring the `-j` parameter
[COMPILING] [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
use cargotest::support::paths::{self, CargoPathExt};
use cargotest::support::registry::Package;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
fn setup() {
let root = paths::root();
t!(fs::create_dir(&root.join(".cargo")));
- t!(t!(File::create(root.join(".cargo/config"))).write_all(br#"
+ t!(t!(File::create(root.join(".cargo/config"))).write_all(
+ br#"
[source.crates-io]
registry = 'https://wut'
replace-with = 'my-awesome-local-registry'
[source.my-awesome-local-registry]
local-registry = 'registry'
- "#));
+ "#
+ ));
}
#[test]
fn simple() {
setup();
Package::new("foo", "0.0.1")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UNPACKING] foo v0.0.1 ([..])
[COMPILING] foo v0.0.1
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] [..]
",
- dir = p.url())));
- assert_that(p.cargo("build"), execs().with_status(0).with_stderr("\
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
assert_that(p.cargo("test"), execs().with_status(0));
}
setup();
Package::new("foo", "0.0.1").local(true).publish();
Package::new("foo", "0.1.0")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "*"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UNPACKING] foo v0.1.0 ([..])
[COMPILING] foo v0.1.0
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
Package::new("foo", "0.2.0")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
-
- assert_that(p.cargo("update").arg("-v"),
- execs().with_status(0).with_stderr("\
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
+
+ assert_that(
+ p.cargo("update").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] foo v0.1.0 -> v0.2.0
-"));
+",
+ ),
+ );
}
#[test]
fn multiple_names() {
setup();
Package::new("foo", "0.0.1")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
Package::new("bar", "0.1.0")
- .local(true)
- .file("src/lib.rs", "pub fn bar() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn bar() {}")
+ .publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "local"
version = "0.0.1"
[dependencies]
foo = "*"
bar = "*"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
extern crate bar;
pub fn local() {
foo::foo();
bar::bar();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UNPACKING] [..]
[UNPACKING] [..]
[COMPILING] [..]
[COMPILING] local v0.0.1 ({dir})
[FINISHED] [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn interdependent() {
setup();
Package::new("foo", "0.0.1")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
Package::new("bar", "0.1.0")
- .local(true)
- .dep("foo", "*")
- .file("src/lib.rs", "extern crate foo; pub fn bar() {}")
- .publish();
+ .local(true)
+ .dep("foo", "*")
+ .file("src/lib.rs", "extern crate foo; pub fn bar() {}")
+ .publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "local"
version = "0.0.1"
[dependencies]
foo = "*"
bar = "*"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
extern crate bar;
pub fn local() {
foo::foo();
bar::bar();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UNPACKING] [..]
[UNPACKING] [..]
[COMPILING] foo v0.0.1
[COMPILING] local v0.0.1 ({dir})
[FINISHED] [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn path_dep_rewritten() {
setup();
Package::new("foo", "0.0.1")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
Package::new("bar", "0.1.0")
- .local(true)
- .dep("foo", "*")
- .file("Cargo.toml", r#"
+ .local(true)
+ .dep("foo", "*")
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
foo = { path = "foo", version = "*" }
- "#)
- .file("src/lib.rs", "extern crate foo; pub fn bar() {}")
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/lib.rs", "extern crate foo; pub fn bar() {}")
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("foo/src/lib.rs", "pub fn foo() {}")
- .publish();
+ "#,
+ )
+ .file("foo/src/lib.rs", "pub fn foo() {}")
+ .publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "local"
version = "0.0.1"
[dependencies]
foo = "*"
bar = "*"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
extern crate bar;
pub fn local() {
foo::foo();
bar::bar();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UNPACKING] [..]
[UNPACKING] [..]
[COMPILING] foo v0.0.1
[COMPILING] local v0.0.1 ({dir})
[FINISHED] [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn invalid_dir_bad() {
setup();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "local"
version = "0.0.1"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[source.crates-io]
registry = 'https://wut'
replace-with = 'my-awesome-local-directory'
[source.my-awesome-local-directory]
local-registry = '/path/to/nowhere'
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to load source for a dependency on `foo`
Caused by:
Caused by:
local registry path is not a directory: [..]path[..]to[..]nowhere
-"));
+",
+ ),
+ );
}
#[test]
t!(fs::rename(&config, &config_tmp));
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "local"
version = "0.0.1"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
config.rm_rf();
t!(fs::rename(&config_tmp, &config));
Package::new("foo", "0.0.1")
- .file("src/lib.rs", "invalid")
- .local(true)
- .publish();
-
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ .file("src/lib.rs", "invalid")
+ .local(true)
+ .publish();
+
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] checksum for `foo v0.0.1` changed between lock files
this could be indicative of a few possible errors:
unable to verify that `foo v0.0.1` is the same as when the lockfile was generated
-"));
+",
+ ),
+ );
}
#[test]
fn crates_io_registry_url_is_optional() {
let root = paths::root();
t!(fs::create_dir(&root.join(".cargo")));
- t!(t!(File::create(root.join(".cargo/config"))).write_all(br#"
+ t!(t!(File::create(root.join(".cargo/config"))).write_all(
+ br#"
[source.crates-io]
replace-with = 'my-awesome-local-registry'
[source.my-awesome-local-registry]
local-registry = 'registry'
- "#));
+ "#
+ ));
Package::new("foo", "0.0.1")
- .local(true)
- .file("src/lib.rs", "pub fn foo() {}")
- .publish();
+ .local(true)
+ .file("src/lib.rs", "pub fn foo() {}")
+ .publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn bar() {
foo::foo();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UNPACKING] foo v0.0.1 ([..])
[COMPILING] foo v0.0.1
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] [..]
",
- dir = p.url())));
- assert_that(p.cargo("build"), execs().with_status(0).with_stderr("\
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
assert_that(p.cargo("test"), execs().with_status(0));
}
use cargotest::support::git;
use cargotest::support::registry::Package;
-use cargotest::support::{execs, project, lines_match};
+use cargotest::support::{execs, lines_match, project};
use hamcrest::assert_that;
#[test]
fn oldest_lockfile_still_works() {
- let cargo_commands = vec![
- "build",
- "update"
- ];
+ let cargo_commands = vec!["build", "update"];
for cargo_command in cargo_commands {
oldest_lockfile_still_works_with_command(cargo_command);
}
fn oldest_lockfile_still_works_with_command(cargo_command: &str) {
Package::new("foo", "0.1.0").publish();
- let expected_lockfile =
-r#"[[package]]
+ let expected_lockfile = r#"[[package]]
name = "foo"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "[..]"
"#;
- let old_lockfile =
-r#"[root]
+ let old_lockfile = r#"[root]
name = "zzz"
version = "0.0.1"
dependencies = [
"#;
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "zzz"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("Cargo.lock", old_lockfile)
.build();
- assert_that(p.cargo(cargo_command),
- execs().with_status(0));
+ assert_that(p.cargo(cargo_command), execs().with_status(0));
let lock = p.read_lockfile();
for (l, r) in expected_lockfile.lines().zip(lock.lines()) {
assert_eq!(lock.lines().count(), expected_lockfile.lines().count());
}
-
#[test]
fn frozen_flag_preserves_old_lockfile() {
let cksum = Package::new("foo", "0.1.0").publish();
[metadata]
"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}"
"#,
- cksum,
+ cksum,
);
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "zzz"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("Cargo.lock", &old_lockfile)
.build();
- assert_that(p.cargo("build").arg("--locked"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--locked"), execs().with_status(0));
let lock = p.read_lockfile();
for (l, r) in old_lockfile.lines().zip(lock.lines()) {
assert_eq!(lock.lines().count(), old_lockfile.lines().count());
}
-
#[test]
fn totally_wild_checksums_works() {
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
[metadata]
"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
-"#);
+"#,
+ );
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
let lock = p.read_lockfile();
- assert!(lock.starts_with(r#"
+ assert!(
+ lock.starts_with(
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
-"#.trim()));
+"#.trim()
+ )
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
[metadata]
"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
-"#);
+"#,
+ );
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry `[..]`
error: checksum for `foo v0.1.0` changed between lock files
unable to verify that `foo v0.1.0` is the same as when the lockfile was generated
-"));
+",
+ ),
+ );
}
// If the checksum is unlisted in the lockfile (e.g. <none>) yet we can
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
[metadata]
"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
-"#);
+"#,
+ );
let p = p.build();
- assert_that(p.cargo("fetch"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("fetch"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry `[..]`
error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \
could now be calculated
older implementation does not
* the lock file is corrupt
-"));
+",
+ ),
+ );
}
// If the checksum is listed in the lockfile yet we cannot calculate it (e.g.
#[test]
fn listed_checksum_bad_if_we_cannot_compute() {
let git = git::new("foo", |p| {
- p.file("Cargo.toml", r#"
+ p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", "")
+ "#,
+ ).file("src/lib.rs", "")
}).unwrap();
let p = project("bar")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = {{ git = '{}' }}
- "#, git.url()))
+ "#,
+ git.url()
+ ),
+ )
.file("src/lib.rs", "")
- .file("Cargo.lock", &format!(r#"
+ .file(
+ "Cargo.lock",
+ &format!(
+ r#"
[[package]]
name = "bar"
version = "0.0.1"
[metadata]
"checksum foo 0.1.0 (git+{0})" = "checksum"
-"#, git.url()));
+"#,
+ git.url()
+ ),
+ );
let p = p.build();
- assert_that(p.cargo("fetch"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("fetch"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] git repository `[..]`
error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \
checksum is listed in the existing lock file[..]
unable to verify that `foo v0.1.0 ([..])` is the same as when the lockfile was generated
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "");
let p = p.build();
"#;
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("Cargo.lock", lockfile);
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build").arg("--locked"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--locked"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry `[..]`
error: the lock file needs to be updated but --locked was passed to prevent this
-"));
+",
+ ),
+ );
}
use std::fs::{self, File};
use toml;
-use cargotest::{ChannelChanger, cargo_process};
+use cargotest::{cargo_process, ChannelChanger};
use cargotest::support::execs;
use cargotest::support::registry::registry;
use cargotest::install::cargo_home;
fn setup_new_credentials() {
let config = cargo_home().join("credentials");
t!(fs::create_dir_all(config.parent().unwrap()));
- t!(t!(File::create(&config)).write_all(format!(r#"
+ t!(t!(File::create(&config)).write_all(
+ format!(
+ r#"
token = "{token}"
- "#, token = ORIGINAL_TOKEN)
- .as_bytes()));
+ "#,
+ token = ORIGINAL_TOKEN
+ ).as_bytes()
+ ));
}
fn check_token(expected_token: &str, registry: Option<&str>) -> bool {
-
let credentials = cargo_home().join("credentials");
assert_that(&credentials, existing_file());
let mut contents = String::new();
- File::open(&credentials).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&credentials)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
let toml: toml::Value = contents.parse().unwrap();
let token = match (registry, toml) {
// A registry has been provided, so check that the token exists in a
// table for the registry.
- (Some(registry), toml::Value::Table(table)) => {
- table.get("registries")
- .and_then(|registries_table| registries_table.get(registry))
- .and_then(|registry_table| {
- match registry_table.get("token") {
- Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()),
- _ => None,
- }
- })
- },
+ (Some(registry), toml::Value::Table(table)) => table
+ .get("registries")
+ .and_then(|registries_table| registries_table.get(registry))
+ .and_then(|registry_table| match registry_table.get("token") {
+ Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()),
+ _ => None,
+ }),
// There is no registry provided, so check the global token instead.
- (None, toml::Value::Table(table)) => {
- table.get("registry")
- .and_then(|registry_table| registry_table.get("token"))
- .and_then(|v| {
- match v {
- &toml::Value::String(ref token) => Some(token.as_str().to_string()),
- _ => None,
- }
- })
- }
- _ => None
+ (None, toml::Value::Table(table)) => table
+ .get("registry")
+ .and_then(|registry_table| registry_table.get("token"))
+ .and_then(|v| match v {
+ &toml::Value::String(ref token) => Some(token.as_str().to_string()),
+ _ => None,
+ }),
+ _ => None,
};
if let Some(token_val) = token {
fn login_with_old_credentials() {
setup_old_credentials();
- assert_that(cargo_process().arg("login")
- .arg("--host").arg(registry().to_string()).arg(TOKEN),
- execs().with_status(0));
+ assert_that(
+ cargo_process()
+ .arg("login")
+ .arg("--host")
+ .arg(registry().to_string())
+ .arg(TOKEN),
+ execs().with_status(0),
+ );
let config = cargo_home().join("config");
assert_that(&config, existing_file());
let mut contents = String::new();
- File::open(&config).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&config)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert_eq!(CONFIG_FILE, contents);
// Ensure that we get the new token for the registry
fn login_with_new_credentials() {
setup_new_credentials();
- assert_that(cargo_process().arg("login")
- .arg("--host").arg(registry().to_string()).arg(TOKEN),
- execs().with_status(0));
+ assert_that(
+ cargo_process()
+ .arg("login")
+ .arg("--host")
+ .arg(registry().to_string())
+ .arg(TOKEN),
+ execs().with_status(0),
+ );
let config = cargo_home().join("config");
assert_that(&config, is_not(existing_file()));
#[test]
fn login_without_credentials() {
- assert_that(cargo_process().arg("login")
- .arg("--host").arg(registry().to_string()).arg(TOKEN),
- execs().with_status(0));
+ assert_that(
+ cargo_process()
+ .arg("login")
+ .arg("--host")
+ .arg(registry().to_string())
+ .arg(TOKEN),
+ execs().with_status(0),
+ );
let config = cargo_home().join("config");
assert_that(&config, is_not(existing_file()));
setup_old_credentials();
setup_new_credentials();
- assert_that(cargo_process().arg("login")
- .arg("--host").arg(registry().to_string()).arg(TOKEN),
- execs().with_status(0));
+ assert_that(
+ cargo_process()
+ .arg("login")
+ .arg("--host")
+ .arg(registry().to_string())
+ .arg(TOKEN),
+ execs().with_status(0),
+ );
let config = Config::new(Shell::new(), cargo_home(), cargo_home());
let reg = "test-reg";
- assert_that(cargo_process().arg("login").masquerade_as_nightly_cargo()
- .arg("--registry").arg(reg).arg(TOKEN).arg("-Zunstable-options"),
- execs().with_status(0));
+ assert_that(
+ cargo_process()
+ .arg("login")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg(reg)
+ .arg(TOKEN)
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
// Ensure that we have not updated the default token
assert!(check_token(ORIGINAL_TOKEN, None));
use hamcrest::assert_that;
use cargotest::support::registry::Package;
-use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest, main_file};
+use cargotest::support::{basic_bin_manifest, basic_lib_manifest, execs, main_file, project};
#[test]
fn cargo_metadata_simple() {
.file("Cargo.toml", &basic_bin_manifest("foo"))
.build();
- assert_that(p.cargo("metadata"), execs().with_json(r#"
+ assert_that(
+ p.cargo("metadata"),
+ execs().with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#));
+ }"#,
+ ),
+ );
}
#[test]
execs().with_stderr("\
[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems"));
- assert_that(p.cargo("metadata").arg("--format-version").arg("1"),
- execs().with_stderr(""));
+ assert_that(
+ p.cargo("metadata").arg("--format-version").arg("1"),
+ execs().with_stderr(""),
+ );
}
#[test]
fn library_with_several_crate_types() {
let p = project("foo")
.file("src/lib.rs", "")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
[lib]
crate-type = ["lib", "staticlib"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("metadata"), execs().with_json(r#"
+ assert_that(
+ p.cargo("metadata"),
+ execs().with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#));
+ }"#,
+ ),
+ );
}
#[test]
fn library_with_features() {
let p = project("foo")
.file("src/lib.rs", "")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
default = ["default_feat"]
default_feat = []
optional_feat = []
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("metadata"), execs().with_json(r#"
+ assert_that(
+ p.cargo("metadata"),
+ execs().with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#));
+ }"#,
+ ),
+ );
}
#[test]
fn cargo_metadata_with_deps_and_version() {
let p = project("foo")
.file("src/foo.rs", "")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.build();
Package::new("baz", "0.0.1").publish();
Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish();
- assert_that(p.cargo("metadata")
- .arg("-q")
- .arg("--format-version").arg("1"),
- execs().with_json(r#"
+ assert_that(
+ p.cargo("metadata")
+ .arg("-q")
+ .arg("--format-version")
+ .arg("1"),
+ execs().with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#));
+ }"#,
+ ),
+ );
}
#[test]
let p = project("foo")
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[example]]
name = "ex"
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("metadata"), execs().with_json(r#"
+ assert_that(
+ p.cargo("metadata"),
+ execs().with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#));
+ }"#,
+ ),
+ );
}
#[test]
let p = project("foo")
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[[example]]
name = "ex"
crate-type = ["rlib", "dylib"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("metadata"), execs().with_json(r#"
+ assert_that(
+ p.cargo("metadata"),
+ execs().with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#));
+ }"#,
+ ),
+ );
}
#[test]
fn workspace_metadata() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar", "baz"]
- "#)
+ "#,
+ )
.file("bar/Cargo.toml", &basic_lib_manifest("bar"))
.file("bar/src/lib.rs", "")
.file("baz/Cargo.toml", &basic_lib_manifest("baz"))
.file("baz/src/lib.rs", "")
.build();
- assert_that(p.cargo("metadata"), execs().with_status(0).with_json(r#"
+ assert_that(
+ p.cargo("metadata"),
+ execs().with_status(0).with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#))
+ }"#,
+ ),
+ )
}
#[test]
fn workspace_metadata_no_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar", "baz"]
- "#)
+ "#,
+ )
.file("bar/Cargo.toml", &basic_lib_manifest("bar"))
.file("bar/src/lib.rs", "")
.file("baz/Cargo.toml", &basic_lib_manifest("baz"))
.file("baz/src/lib.rs", "")
.build();
- assert_that(p.cargo("metadata").arg("--no-deps"), execs().with_status(0).with_json(r#"
+ assert_that(
+ p.cargo("metadata").arg("--no-deps"),
+ execs().with_status(0).with_json(
+ r#"
{
"packages": [
{
"target_directory": "[..]foo[/]target",
"version": 1,
"workspace_root": "[..][/]foo"
- }"#))
+ }"#,
+ ),
+ )
}
#[test]
fn cargo_metadata_with_invalid_manifest() {
- let p = project("foo")
- .file("Cargo.toml", "")
- .build();
+ let p = project("foo").file("Cargo.toml", "").build();
- assert_that(p.cargo("metadata").arg("--format-version").arg("1"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("metadata").arg("--format-version").arg("1"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- virtual manifests must be configured with [workspace]"))
+ virtual manifests must be configured with [workspace]",
+ ),
+ )
}
-const MANIFEST_OUTPUT: &'static str=
- r#"
+const MANIFEST_OUTPUT: &'static str = r#"
{
"packages": [{
"name":"foo",
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("metadata").arg("--no-deps")
- .arg("--manifest-path").arg("foo/Cargo.toml")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0)
- .with_json(MANIFEST_OUTPUT));
+ assert_that(
+ p.cargo("metadata")
+ .arg("--no-deps")
+ .arg("--manifest-path")
+ .arg("foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(0).with_json(MANIFEST_OUTPUT),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("metadata").arg("--no-deps")
- .arg("--manifest-path").arg(p.root().join("Cargo.toml"))
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0)
- .with_json(MANIFEST_OUTPUT));
+ assert_that(
+ p.cargo("metadata")
+ .arg("--no-deps")
+ .arg("--manifest-path")
+ .arg(p.root().join("Cargo.toml"))
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(0).with_json(MANIFEST_OUTPUT),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("metadata").arg("--no-deps")
- .arg("--manifest-path").arg("foo")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(101)
- .with_stderr("[ERROR] the manifest-path must be \
- a path to a Cargo.toml file"));
+ assert_that(
+ p.cargo("metadata")
+ .arg("--no-deps")
+ .arg("--manifest-path")
+ .arg("foo")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(101).with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ ),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("metadata").arg("--no-deps")
- .arg("--manifest-path").arg(p.root())
- .cwd(p.root().parent().unwrap()),
- execs().with_status(101)
- .with_stderr("[ERROR] the manifest-path must be \
- a path to a Cargo.toml file"));
+ assert_that(
+ p.cargo("metadata")
+ .arg("--no-deps")
+ .arg("--manifest-path")
+ .arg(p.root())
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(101).with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ ),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("metadata").arg("--no-deps")
- .cwd(p.root()),
- execs().with_status(0)
- .with_json(MANIFEST_OUTPUT));
+ assert_that(
+ p.cargo("metadata").arg("--no-deps").cwd(p.root()),
+ execs().with_status(0).with_json(MANIFEST_OUTPUT),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("metadata").arg("--no-deps")
- .arg("--format-version").arg("2")
- .cwd(p.root()),
- execs().with_status(1)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("metadata")
+ .arg("--no-deps")
+ .arg("--format-version")
+ .arg("2")
+ .cwd(p.root()),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: '2' isn't a valid value for '--format-version <VERSION>'
<tab>[possible values: 1]
-"));
+",
+ ),
+ );
}
#[test]
fn multiple_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[features]
a = []
b = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("metadata")
- .arg("--features").arg("a b"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("metadata").arg("--features").arg("a b"),
+ execs().with_status(0),
+ );
}
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn net_retry_loads_from_config() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
git = "https://127.0.0.1:11/foo/bar"
- "#)
- .file("src/main.rs", "").file(".cargo/config", r#"
+ "#,
+ )
+ .file("src/main.rs", "")
+ .file(
+ ".cargo/config",
+ r#"
[net]
retry=1
[http]
timeout=1
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101)
- .with_stderr_contains("[WARNING] spurious network error \
-(1 tries remaining): [..]"));
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "[WARNING] spurious network error \
+ (1 tries remaining): [..]",
+ ),
+ );
}
#[test]
fn net_retry_git_outputs_warning() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
git = "https://127.0.0.1:11/foo/bar"
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[http]
timeout=1
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("-j").arg("1"),
- execs().with_status(101)
- .with_stderr_contains("[WARNING] spurious network error \
-(2 tries remaining): [..]")
- .with_stderr_contains("\
-[WARNING] spurious network error (1 tries remaining): [..]"));
+ assert_that(
+ p.cargo("build").arg("-v").arg("-j").arg("1"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(
+ "[WARNING] spurious network error \
+ (2 tries remaining): [..]",
+ )
+ .with_stderr_contains(
+ "\
+ [WARNING] spurious network error (1 tries remaining): [..]",
+ ),
+ );
}
use cargo::util::ProcessBuilder;
use cargotest::process;
use cargotest::support::{execs, paths};
-use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
use tempdir::TempDir;
fn cargo_process(s: &str) -> ProcessBuilder {
File::create(gitconfig).unwrap();
}
-
#[test]
fn simple_lib() {
- assert_that(cargo_process("new").arg("--lib").arg("foo").arg("--vcs").arg("none")
- .env("USER", "foo"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo_process("new")
+ .arg("--lib")
+ .arg("foo")
+ .arg("--vcs")
+ .arg("none")
+ .env("USER", "foo"),
+ execs().with_status(0).with_stderr(
+ "\
[CREATED] library `foo` project
-"));
+",
+ ),
+ );
assert_that(&paths::root().join("foo"), existing_dir());
assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
assert_that(&paths::root().join("foo/src/lib.rs"), existing_file());
- assert_that(&paths::root().join("foo/.gitignore"), is_not(existing_file()));
+ assert_that(
+ &paths::root().join("foo/.gitignore"),
+ is_not(existing_file()),
+ );
let lib = paths::root().join("foo/src/lib.rs");
let mut contents = String::new();
- File::open(&lib).unwrap().read_to_string(&mut contents).unwrap();
- assert_eq!(contents, r#"#[cfg(test)]
+ File::open(&lib)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert_eq!(
+ contents,
+ r#"#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
}
-"#);
+"#
+ );
- assert_that(cargo_process("build").cwd(&paths::root().join("foo")),
- execs().with_status(0));
+ assert_that(
+ cargo_process("build").cwd(&paths::root().join("foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn simple_bin() {
- assert_that(cargo_process("new").arg("--bin").arg("foo")
- .env("USER", "foo"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo_process("new")
+ .arg("--bin")
+ .arg("foo")
+ .env("USER", "foo"),
+ execs().with_status(0).with_stderr(
+ "\
[CREATED] binary (application) `foo` project
-"));
+",
+ ),
+ );
assert_that(&paths::root().join("foo"), existing_dir());
assert_that(&paths::root().join("foo/Cargo.toml"), existing_file());
assert_that(&paths::root().join("foo/src/main.rs"), existing_file());
- assert_that(cargo_process("build").cwd(&paths::root().join("foo")),
- execs().with_status(0));
- assert_that(&paths::root().join(&format!("foo/target/debug/foo{}",
- env::consts::EXE_SUFFIX)),
- existing_file());
+ assert_that(
+ cargo_process("build").cwd(&paths::root().join("foo")),
+ execs().with_status(0),
+ );
+ assert_that(
+ &paths::root().join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)),
+ existing_file(),
+ );
}
#[test]
fn both_lib_and_bin() {
- assert_that(cargo_process("new").arg("--lib").arg("--bin").arg("foo")
- .env("USER", "foo"),
- execs().with_status(101).with_stderr(
- "[ERROR] can't specify both lib and binary outputs"));
+ assert_that(
+ cargo_process("new")
+ .arg("--lib")
+ .arg("--bin")
+ .arg("foo")
+ .env("USER", "foo"),
+ execs()
+ .with_status(101)
+ .with_stderr("[ERROR] can't specify both lib and binary outputs"),
+ );
}
#[test]
// If this ran inside paths::root() it would detect that we are already
// inside a git repo and skip the initialization.
let td = TempDir::new("cargo").unwrap();
- assert_that(cargo_process("new").arg("--lib").arg("foo").cwd(td.path())
- .env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new")
+ .arg("--lib")
+ .arg("foo")
+ .cwd(td.path())
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert_that(td.path(), existing_dir());
assert_that(&td.path().join("foo/Cargo.toml"), existing_file());
assert_that(&td.path().join("foo/.git"), existing_dir());
assert_that(&td.path().join("foo/.gitignore"), existing_file());
- assert_that(cargo_process("build").cwd(&td.path().join("foo")),
- execs().with_status(0));
+ assert_that(
+ cargo_process("build").cwd(&td.path().join("foo")),
+ execs().with_status(0),
+ );
}
#[test]
fn no_argument() {
- assert_that(cargo_process("new"),
- execs().with_status(1)
- .with_stderr_contains("\
+ assert_that(
+ cargo_process("new"),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: The following required arguments were not provided:
<path>
-"));
+",
+ ),
+ );
}
#[test]
fn existing() {
let dst = paths::root().join("foo");
fs::create_dir(&dst).unwrap();
- assert_that(cargo_process("new").arg("foo"),
- execs().with_status(101)
- .with_stderr(format!("[ERROR] destination `{}` already exists\n\n\
- Use `cargo init` to initialize the directory",
- dst.display())));
+ assert_that(
+ cargo_process("new").arg("foo"),
+ execs().with_status(101).with_stderr(format!(
+ "[ERROR] destination `{}` already exists\n\n\
+ Use `cargo init` to initialize the directory",
+ dst.display()
+ )),
+ );
}
#[test]
fn invalid_characters() {
- assert_that(cargo_process("new").arg("foo.rs"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ cargo_process("new").arg("foo.rs"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] Invalid character `.` in crate name: `foo.rs`
-use --name to override crate name"));
+use --name to override crate name",
+ ),
+ );
}
#[test]
fn reserved_name() {
- assert_that(cargo_process("new").arg("test"),
- execs().with_status(101)
- .with_stderr("\
-[ERROR] The name `test` cannot be used as a crate name\n\
-use --name to override crate name"));
+ assert_that(
+ cargo_process("new").arg("test"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] The name `test` cannot be used as a crate name\n\
+ use --name to override crate name",
+ ),
+ );
}
#[test]
fn reserved_binary_name() {
- assert_that(cargo_process("new").arg("--bin").arg("incremental"),
- execs().with_status(101)
- .with_stderr("\
-[ERROR] The name `incremental` cannot be used as a crate name\n\
-use --name to override crate name"));
+ assert_that(
+ cargo_process("new").arg("--bin").arg("incremental"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] The name `incremental` cannot be used as a crate name\n\
+ use --name to override crate name",
+ ),
+ );
}
#[test]
fn keyword_name() {
- assert_that(cargo_process("new").arg("pub"),
- execs().with_status(101)
- .with_stderr("\
-[ERROR] The name `pub` cannot be used as a crate name\n\
-use --name to override crate name"));
+ assert_that(
+ cargo_process("new").arg("pub"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] The name `pub` cannot be used as a crate name\n\
+ use --name to override crate name",
+ ),
+ );
}
#[test]
fn finds_author_user() {
create_empty_gitconfig();
- assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["foo"]"#));
}
#[test]
fn finds_author_user_escaped() {
create_empty_gitconfig();
- assert_that(cargo_process("new").arg("foo").env("USER", "foo \"bar\""),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo \"bar\""),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["foo \"bar\""]"#));
}
#[test]
fn finds_author_username() {
create_empty_gitconfig();
- assert_that(cargo_process("new").arg("foo")
- .env_remove("USER")
- .env("USERNAME", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env_remove("USER")
+ .env("USERNAME", "foo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["foo"]"#));
}
#[test]
fn finds_author_priority() {
- assert_that(cargo_process("new").arg("foo")
- .env("USER", "bar2")
- .env("EMAIL", "baz2")
- .env("CARGO_NAME", "bar")
- .env("CARGO_EMAIL", "baz"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env("USER", "bar2")
+ .env("EMAIL", "baz2")
+ .env("CARGO_NAME", "bar")
+ .env("CARGO_EMAIL", "baz"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
}
#[test]
fn finds_author_email() {
create_empty_gitconfig();
- assert_that(cargo_process("new").arg("foo")
- .env("USER", "bar")
- .env("EMAIL", "baz"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env("USER", "bar")
+ .env("EMAIL", "baz"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
}
#[test]
fn finds_author_git() {
- process("git").args(&["config", "--global", "user.name", "bar"])
- .exec().unwrap();
- process("git").args(&["config", "--global", "user.email", "baz"])
- .exec().unwrap();
- assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
- execs().with_status(0));
+ process("git")
+ .args(&["config", "--global", "user.name", "bar"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "--global", "user.email", "baz"])
+ .exec()
+ .unwrap();
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
}
#[test]
fn finds_local_author_git() {
- process("git").args(&["init"])
- .exec().unwrap();
- process("git").args(&["config", "--global", "user.name", "foo"])
- .exec().unwrap();
- process("git").args(&["config", "--global", "user.email", "foo@bar"])
- .exec().unwrap();
+ process("git").args(&["init"]).exec().unwrap();
+ process("git")
+ .args(&["config", "--global", "user.name", "foo"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "--global", "user.email", "foo@bar"])
+ .exec()
+ .unwrap();
// Set local git user config
- process("git").args(&["config", "user.name", "bar"])
- .exec().unwrap();
- process("git").args(&["config", "user.email", "baz"])
- .exec().unwrap();
- assert_that(cargo_process("init").env("USER", "foo"),
- execs().with_status(0));
+ process("git")
+ .args(&["config", "user.name", "bar"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "user.email", "baz"])
+ .exec()
+ .unwrap();
+ assert_that(
+ cargo_process("init").env("USER", "foo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
}
#[test]
fn finds_git_email() {
- assert_that(cargo_process("new").arg("foo")
- .env("GIT_AUTHOR_NAME", "foo")
- .env("GIT_AUTHOR_EMAIL", "gitfoo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env("GIT_AUTHOR_NAME", "foo")
+ .env("GIT_AUTHOR_EMAIL", "gitfoo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["foo <gitfoo>"]"#), contents);
}
-
#[test]
fn finds_git_author() {
create_empty_gitconfig();
- assert_that(cargo_process("new").arg("foo")
- .env_remove("USER")
- .env("GIT_COMMITTER_NAME", "gitfoo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .env_remove("USER")
+ .env("GIT_COMMITTER_NAME", "gitfoo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["gitfoo"]"#));
}
#[test]
fn author_prefers_cargo() {
- process("git").args(&["config", "--global", "user.name", "foo"])
- .exec().unwrap();
- process("git").args(&["config", "--global", "user.email", "bar"])
- .exec().unwrap();
+ process("git")
+ .args(&["config", "--global", "user.name", "foo"])
+ .exec()
+ .unwrap();
+ process("git")
+ .args(&["config", "--global", "user.email", "bar"])
+ .exec()
+ .unwrap();
let root = paths::root();
fs::create_dir(&root.join(".cargo")).unwrap();
- File::create(&root.join(".cargo/config")).unwrap().write_all(br#"
+ File::create(&root.join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ br#"
[cargo-new]
name = "new-foo"
email = "new-bar"
vcs = "none"
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
- assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
let toml = paths::root().join("foo/Cargo.toml");
let mut contents = String::new();
- File::open(&toml).unwrap().read_to_string(&mut contents).unwrap();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
assert!(contents.contains(r#"authors = ["new-foo <new-bar>"]"#));
assert!(!root.join("foo/.gitignore").exists());
}
fn git_prefers_command_line() {
let root = paths::root();
fs::create_dir(&root.join(".cargo")).unwrap();
- File::create(&root.join(".cargo/config")).unwrap().write_all(br#"
+ File::create(&root.join(".cargo/config"))
+ .unwrap()
+ .write_all(
+ br#"
[cargo-new]
vcs = "none"
name = "foo"
email = "bar"
- "#).unwrap();
-
- assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("git")
- .env("USER", "foo"),
- execs().with_status(0));
+ "#,
+ )
+ .unwrap();
+
+ assert_that(
+ cargo_process("new")
+ .arg("foo")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
assert!(paths::root().join("foo/.gitignore").exists());
}
#[test]
fn subpackage_no_git() {
- assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
let subpackage = paths::root().join("foo").join("components");
fs::create_dir(&subpackage).unwrap();
- assert_that(cargo_process("new").arg("foo/components/subcomponent")
- .env("USER", "foo"),
- execs().with_status(0));
-
- assert_that(&paths::root().join("foo/components/subcomponent/.git"),
- is_not(existing_file()));
- assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"),
- is_not(existing_file()));
+ assert_that(
+ cargo_process("new")
+ .arg("foo/components/subcomponent")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.git"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.gitignore"),
+ is_not(existing_file()),
+ );
}
#[test]
fn subpackage_git_with_vcs_arg() {
- assert_that(cargo_process("new").arg("foo").env("USER", "foo"),
- execs().with_status(0));
+ assert_that(
+ cargo_process("new").arg("foo").env("USER", "foo"),
+ execs().with_status(0),
+ );
let subpackage = paths::root().join("foo").join("components");
fs::create_dir(&subpackage).unwrap();
- assert_that(cargo_process("new").arg("foo/components/subcomponent")
- .arg("--vcs").arg("git")
- .env("USER", "foo"),
- execs().with_status(0));
-
- assert_that(&paths::root().join("foo/components/subcomponent/.git"),
- existing_dir());
- assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"),
- existing_file());
+ assert_that(
+ cargo_process("new")
+ .arg("foo/components/subcomponent")
+ .arg("--vcs")
+ .arg("git")
+ .env("USER", "foo"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.git"),
+ existing_dir(),
+ );
+ assert_that(
+ &paths::root().join("foo/components/subcomponent/.gitignore"),
+ existing_file(),
+ );
}
#[test]
fn unknown_flags() {
- assert_that(cargo_process("new").arg("foo").arg("--flag"),
- execs().with_status(1)
- .with_stderr_contains("\
+ assert_that(
+ cargo_process("new").arg("foo").arg("--flag"),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: Found argument '--flag' which wasn't expected, or isn't valid in this context
-"));
+",
+ ),
+ );
}
#[test]
fn explicit_invalid_name_not_suggested() {
- assert_that(cargo_process("new").arg("--name").arg("10-invalid").arg("a"),
- execs().with_status(101)
- .with_stderr("\
-[ERROR] Package names starting with a digit cannot be used as a crate name"));
+ assert_that(
+ cargo_process("new")
+ .arg("--name")
+ .arg("10-invalid")
+ .arg("a"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] Package names starting with a digit cannot be used as a crate name",
+ ),
+ );
}
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
- .file("src/lib.rs", "
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::foo();
}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[UPDATING] git repository `[..]`
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] local v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn missing_version() {
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
foo = { git = 'https://example.com' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
replacements must specify a version to replace, but `[..]foo` does not
-"));
+",
+ ),
+ );
}
#[test]
fn invalid_semver_version() {
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:*" = { git = 'https://example.com' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
replacements must specify a valid semver version to replace, but `foo:*` does not
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = "0.2.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
replacements cannot specify a version requirement, but found one for [..]
-"));
+",
+ ),
+ );
}
#[test]
fn transitive() {
Package::new("foo", "0.1.0").publish();
Package::new("bar", "0.2.0")
- .dep("foo", "0.1.0")
- .file("src/lib.rs", "extern crate foo; fn bar() { foo::foo(); }")
- .publish();
+ .dep("foo", "0.1.0")
+ .file("src/lib.rs", "extern crate foo; fn bar() { foo::foo(); }")
+ .publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[UPDATING] git repository `[..]`
[DOWNLOADING] bar v0.2.0 (registry [..])
[COMPILING] bar v0.2.0
[COMPILING] local v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
- .file("src/lib.rs", "
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::foo();
}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[UPDATING] git repository `file://[..]`
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] local v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
Package::new("foo", "0.1.0").publish();
let _ = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = { path = "../foo" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::foo();
}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] local v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn use_a_spec_to_select() {
Package::new("foo", "0.1.1")
- .file("src/lib.rs", "pub fn foo1() {}")
- .publish();
+ .file("src/lib.rs", "pub fn foo1() {}")
+ .publish();
Package::new("foo", "0.2.0").publish();
Package::new("bar", "0.1.1")
- .dep("foo", "0.2")
- .file("src/lib.rs", "
+ .dep("foo", "0.2")
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() { foo::foo3(); }
- ")
- .publish();
+ ",
+ )
+ .publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo3() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.2.0" = {{ git = '{}' }}
- "#, foo.url()))
- .file("src/lib.rs", "
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
extern crate bar;
foo::foo1();
bar::bar();
}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[UPDATING] git repository `[..]`
[DOWNLOADING] [..]
[COMPILING] [..]
[COMPILING] local v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
Package::new("bar", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"bar:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[UPDATING] git repository `[..]`
[DOWNLOADING] foo v0.1.1 (registry [..])
[COMPILING] bar v0.1.0 ([..])
[COMPILING] local v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
Package::new("foo", "0.1.2").publish();
- assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", foo.url())),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("update")
+ .arg("-p")
+ .arg(&format!("{}#bar", foo.url())),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `file://[..]`
-"));
- assert_that(p.cargo("update")
- .arg("-p")
- .arg("https://github.com/rust-lang/crates.io-index#bar"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("update")
+ .arg("-p")
+ .arg("https://github.com/rust-lang/crates.io-index#bar"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
-"));
+",
+ ),
+ );
assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
Package::new("bar", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"bar:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[UPDATING] git repository [..]
error: no matching package for override `[..]foo:0.1.0` found
location searched: file://[..]
version required: = 0.1.0
-"));
+",
+ ),
+ );
}
#[test]
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[UPDATING] git repository [..]
[ERROR] failed to load source for a dependency on `foo`
Caused by:
Could not find Cargo.toml in `[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn override_wrong_version() {
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = { git = 'https://example.com', version = '0.2.0' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
replacements cannot specify a version requirement, but found one for `[..]foo:0.1.0`
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace."https://github.com/rust-lang/crates.io-index#foo:0.1.0"]
git = '{0}'
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[UPDATING] git repository [..]
error: overlapping replacement specifications found:
* [..]
both specifications match: foo v0.1.0
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{0}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("test").arg("-p").arg("foo"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("test").arg("-p").arg("foo"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: There are multiple `foo` packages in your project, and the [..]
Please re-run this command with [..]
[..]#foo:0.1.0
[..]#foo:0.1.0
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{0}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
- assert_that(p.cargo("update"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
+ assert_that(
+ p.cargo("update"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] git repository `[..]`
-"));
+",
+ ),
+ );
}
// local -> near -> far
#[test]
fn no_override_self() {
let deps = git::repo(&paths::root().join("override"))
-
- .file("far/Cargo.toml", r#"
+ .file(
+ "far/Cargo.toml",
+ r#"
[package]
name = "far"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("far/src/lib.rs", "")
-
- .file("near/Cargo.toml", r#"
+ .file(
+ "near/Cargo.toml",
+ r#"
[package]
name = "near"
version = "0.1.0"
[dependencies]
far = { path = "../far" }
- "#)
- .file("near/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "near/src/lib.rs",
+ r#"
#![no_std]
pub extern crate far;
- "#)
+ "#,
+ )
.build();
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"near:0.1.0" = {{ git = '{0}' }}
- "#, deps.url()))
- .file("src/lib.rs", r#"
+ "#,
+ deps.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![no_std]
pub extern crate near;
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").arg("--verbose"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--verbose"), execs().with_status(0));
}
#[test]
Package::new("foo", "0.2.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[dependencies]
a = { path = "a1" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a1/Cargo.toml", r#"
+ .file(
+ "a1/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies]
foo = "0.1"
- "#)
+ "#,
+ )
.file("a1/src/lib.rs", "")
- .file("a2/Cargo.toml", r#"
+ .file(
+ "a2/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies]
foo = "0.2"
- "#)
+ "#,
+ )
.file("a2/src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
paths = ["a2"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] [..]
warning: path override for crate `a` has altered the original list of
dependencies; the dependency on `foo` was either added or
[COMPILING] [..]
[COMPILING] [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn override_an_override() {
- Package::new("chrono", "0.2.0").dep("serde", "< 0.9").publish();
+ Package::new("chrono", "0.2.0")
+ .dep("serde", "< 0.9")
+ .publish();
Package::new("serde", "0.7.0")
.file("src/lib.rs", "pub fn serde07() {}")
.publish();
.publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"chrono:0.2.0" = { path = "chrono" }
"serde:0.8.0" = { path = "serde" }
- "#)
- .file("Cargo.lock", r#"
+ "#,
+ )
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "local"
version = "0.0.1"
[[package]]
name = "serde"
version = "0.8.0"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate chrono;
extern crate serde;
chrono::chrono();
serde::serde08_override();
}
- ")
- .file("chrono/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "chrono/Cargo.toml",
+ r#"
[package]
name = "chrono"
version = "0.2.0"
[dependencies]
serde = "< 0.9"
- "#)
- .file("chrono/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "chrono/src/lib.rs",
+ "
extern crate serde;
pub fn chrono() {
serde::serde07();
}
- ")
- .file("serde/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "serde/Cargo.toml",
+ r#"
[package]
name = "serde"
version = "0.8.0"
authors = []
- "#)
- .file("serde/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "serde/src/lib.rs",
+ "
pub fn serde08_override() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
Package::new("bar", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn foo() {}")
.build();
-
let p = project("local")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = {{ git = '{url}' }}
"bar:0.1.0" = {{ git = '{url}' }}
- "#, url = foo.url()))
+ "#,
+ url = foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("build"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[WARNING] package replacement is not used: [..]bar:0.1.0
[FINISHED] [..]
-").with_stdout(""));
+",
+ )
+ .with_stdout(""),
+ );
}
#[test]
Package::new("bar", "0.1.0").publish();
let p = project("ws")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = [ "first_crate", "second_crate"]
[replace]
- "foo:0.1.0" = { path = "local_foo" }"#)
- .file("first_crate/Cargo.toml", r#"
+ "foo:0.1.0" = { path = "local_foo" }"#,
+ )
+ .file(
+ "first_crate/Cargo.toml",
+ r#"
[package]
name = "first_crate"
version = "0.1.0"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("first_crate/src/lib.rs", "")
- .file("second_crate/Cargo.toml", r#"
+ .file(
+ "second_crate/Cargo.toml",
+ r#"
[package]
name = "second_crate"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("second_crate/src/lib.rs", "")
- .file("local_foo/Cargo.toml", r#"
+ .file(
+ "local_foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("local_foo/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").cwd(p.root().join("first_crate")),
- execs().with_status(0)
- .with_stdout("")
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("first_crate")),
+ execs().with_status(0).with_stdout("").with_stderr(
+ "\
[UPDATING] registry `[..]`
[COMPILING] foo v0.1.0 ([..])
[COMPILING] first_crate v0.1.0 ([..])
-[FINISHED] [..]"));
-
- assert_that(p.cargo("build").cwd(p.root().join("second_crate")),
- execs().with_status(0)
- .with_stdout("")
- .with_stderr("\
+[FINISHED] [..]",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").cwd(p.root().join("second_crate")),
+ execs().with_status(0).with_stdout("").with_stderr(
+ "\
[COMPILING] second_crate v0.1.0 ([..])
-[FINISHED] [..]"));
+[FINISHED] [..]",
+ ),
+ );
}
-
#[test]
fn override_to_path_dep() {
Package::new("foo", "0.1.0").dep("bar", "0.1").publish();
Package::new("bar", "0.1.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = { path = "bar" }
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("foo/bar/Cargo.toml", r#"
+ .file(
+ "foo/bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("foo/bar/src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
paths = ["foo"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
Package::new("bar", "0.1.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
"foo:0.1.0" = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "extern crate foo;")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
- "#)
- .file("foo/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ "
extern crate bar;
pub fn foo() {
bar::bar();
}
- ")
- .file("foo/bar/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "foo/bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/bar/src/lib.rs", "pub fn bar() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
Package::new("bar", "0.1.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { version = "0.1", optional = true }
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("foo2/Cargo.toml", r#"
+ .file(
+ "foo2/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { version = "0.1", optional = true }
- "#)
+ "#,
+ )
.file("foo2/src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
paths = ["foo2"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0 ([..]foo2)
[COMPILING] local v0.0.1 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
Package::new("bar", "0.1.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("foo2/Cargo.toml", r#"
+ .file(
+ "foo2/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { version = "0.1", optional = true }
- "#)
+ "#,
+ )
.file("foo2/src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
paths = ["foo2"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr_contains(
+ "\
warning: path override for crate `foo` has altered the original list of
dependencies; the dependency on `bar` was either added or\
-"));
+",
+ ),
+ );
}
#[test]
fn override_with_default_feature() {
Package::new("another", "0.1.0").publish();
- Package::new("another", "0.1.1")
- .dep("bar", "0.1")
- .publish();
+ Package::new("another", "0.1.1").dep("bar", "0.1").publish();
Package::new("bar", "0.1.0").publish();
let p = project("local")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "local"
version = "0.0.1"
[replace]
'bar:0.1.0' = { path = "bar" }
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
bar::bar();
}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[features]
default = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[cfg(feature = "default")]
pub fn bar() {}
- "#)
- .file("another2/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "another2/Cargo.toml",
+ r#"
[package]
name = "another2"
version = "0.1.0"
[dependencies]
bar = { version = "0.1", default-features = false }
- "#)
+ "#,
+ )
.file("another2/src/lib.rs", "")
.build();
- assert_that(p.cargo("run"),
- execs().with_status(0));
+ assert_that(p.cargo("run"), execs().with_status(0));
}
#[test]
Package::new("bar", "0.1.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[replace]
'bar:0.1.0' = { path = "bar" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = { path = ".." }
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: cyclic package dependency: [..]
-"));
+",
+ ),
+ );
}
use git2;
use cargotest::{cargo_process, process, ChannelChanger};
-use cargotest::support::{project, execs, paths, git, path2url, cargo_exe, registry};
+use cargotest::support::{cargo_exe, execs, git, paths, project, registry, path2url};
use cargotest::support::registry::Package;
use flate2::read::GzDecoder;
-use hamcrest::{assert_that, existing_file, contains};
+use hamcrest::{assert_that, contains, existing_file};
use tar::Archive;
#[test]
.file("src/bar.txt", "") // should be ignored when packaging
.build();
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] manifest has no documentation[..]
See [..]
[PACKAGING] foo v0.0.1 ({dir})
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
- assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file());
- assert_that(p.cargo("package").arg("-l"),
- execs().with_status(0).with_stdout("\
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ &p.root().join("target/package/foo-0.0.1.crate"),
+ existing_file(),
+ );
+ assert_that(
+ p.cargo("package").arg("-l"),
+ execs().with_status(0).with_stdout(
+ "\
Cargo.toml
src[/]main.rs
-"));
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stdout(""));
+",
+ ),
+ );
+ assert_that(p.cargo("package"), execs().with_status(0).with_stdout(""));
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
let f = f.unwrap();
let fname = f.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"foo-0.0.1/Cargo.toml" ||
- fname == b"foo-0.0.1/Cargo.toml.orig" ||
- fname == b"foo-0.0.1/src/main.rs",
- "unexpected filename: {:?}", f.header().path())
+ assert!(
+ fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig"
+ || fname == b"foo-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ f.header().path()
+ )
}
}
#[test]
fn metadata_warning() {
let p = project("all")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
warning: manifest has no description, license, license-file, documentation, \
homepage or repository.
See http://doc.crates.io/manifest.html#package-metadata for more info.
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
let p = project("one")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
license = "MIT"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
warning: manifest has no description, documentation, homepage or repository.
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 ({dir})
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
let p = project("all")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
license = "MIT"
description = "foo"
repository = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[PACKAGING] foo v0.0.1 ({dir})
[VERIFYING] foo v0.0.1 ({dir})
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn package_verbose() {
let root = paths::root().join("all");
let p = git::repo(&root)
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
let mut cargo = cargo_process();
assert_that(cargo.clone().arg("build"), execs().with_status(0));
println!("package main repo");
- assert_that(cargo.clone().arg("package").arg("-v").arg("--no-verify"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo.clone().arg("package").arg("-v").arg("--no-verify"),
+ execs().with_status(0).with_stderr(
+ "\
[WARNING] manifest has no description[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 ([..])
[ARCHIVING] [..]
[ARCHIVING] [..]
-"));
+",
+ ),
+ );
println!("package sub-repo");
- assert_that(cargo.arg("package").arg("-v").arg("--no-verify")
- .cwd(p.root().join("a")),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ cargo
+ .arg("package")
+ .arg("-v")
+ .arg("--no-verify")
+ .cwd(p.root().join("a")),
+ execs().with_status(0).with_stderr(
+ "\
[WARNING] manifest has no description[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] a v0.0.1 ([..])
[ARCHIVING] [..]
[ARCHIVING] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn package_verification() {
let p = project("all")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] manifest has no description[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 ({dir})
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn path_dependency_no_version() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("package"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(101).with_stderr(
+ "\
[WARNING] manifest has no documentation, homepage or repository.
See http://doc.crates.io/manifest.html#package-metadata for more info.
[ERROR] all path dependencies must have a version specified when packaging.
dependency `bar` does not specify a version.
-"));
+",
+ ),
+ );
}
#[test]
.file("some_dir/dir_deep_5/some_dir/file", "")
.build();
- assert_that(p.cargo("package").arg("--no-verify").arg("-v"),
- execs().with_status(0).with_stdout("").with_stderr("\
+ assert_that(
+ p.cargo("package").arg("--no-verify").arg("-v"),
+ execs().with_status(0).with_stdout("").with_stderr(
+ "\
[WARNING] manifest has no description[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 ([..])
[ARCHIVING] [..]
[ARCHIVING] [..]
[ARCHIVING] [..]
-"));
+",
+ ),
+ );
- assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file());
+ assert_that(
+ &p.root().join("target/package/foo-0.0.1.crate"),
+ existing_file(),
+ );
- assert_that(p.cargo("package").arg("-l"),
- execs().with_status(0).with_stdout("\
+ assert_that(
+ p.cargo("package").arg("-l"),
+ execs().with_status(0).with_stdout(
+ "\
Cargo.toml
dir_root_1[/]some_dir[/]file
dir_root_2[/]some_dir[/]file
some_dir[/]file_deep_4
some_dir[/]file_deep_5
src[/]main.rs
-"));
+",
+ ),
+ );
}
#[test]
.file("src/bar.txt", "") // should be ignored when packaging
.build();
- assert_that(p.cargo("package").arg("--no-verify").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("package").arg("--no-verify").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[WARNING] manifest has no description[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 ([..])
[ARCHIVING] [..]
[ARCHIVING] [..]
[ARCHIVING] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn package_lib_with_bin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("package").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("package").arg("-v"), execs().with_status(0));
}
#[test]
fn package_git_submodule() {
let project = git::new("foo", |project| {
- project.file("Cargo.toml", r#"
+ project
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
license = "MIT"
description = "foo"
repository = "foo"
- "#)
- .file("src/lib.rs", "pub fn foo() {}")
- }).unwrap();
- let library = git::new("bar", |library| {
- library.file("Makefile", "all:")
+ "#,
+ )
+ .file("src/lib.rs", "pub fn foo() {}")
}).unwrap();
+ let library = git::new("bar", |library| library.file("Makefile", "all:")).unwrap();
let repository = git2::Repository::open(&project.root()).unwrap();
let url = path2url(library.root()).to_string();
git::commit(&repository);
let repository = git2::Repository::open(&project.root().join("bar")).unwrap();
- repository.reset(&repository.revparse_single("HEAD").unwrap(),
- git2::ResetType::Hard, None).unwrap();
-
- assert_that(cargo_process().arg("package").cwd(project.root())
- .arg("--no-verify").arg("-v"),
- execs().with_status(0).with_stderr_contains("[ARCHIVING] bar/Makefile"));
+ repository
+ .reset(
+ &repository.revparse_single("HEAD").unwrap(),
+ git2::ResetType::Hard,
+ None,
+ )
+ .unwrap();
+
+ assert_that(
+ cargo_process()
+ .arg("package")
+ .cwd(project.root())
+ .arg("--no-verify")
+ .arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains("[ARCHIVING] bar/Makefile"),
+ );
}
#[test]
fn no_duplicates_from_modified_tracked_files() {
let root = paths::root().join("all");
let p = git::repo(&root)
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- File::create(p.root().join("src/main.rs")).unwrap().write_all(br#"
+ File::create(p.root().join("src/main.rs"))
+ .unwrap()
+ .write_all(
+ br#"
fn main() { println!("A change!"); }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
let mut cargo = cargo_process();
cargo.cwd(p.root());
assert_that(cargo.clone().arg("build"), execs().with_status(0));
- assert_that(cargo.arg("package").arg("--list"),
- execs().with_status(0).with_stdout("\
+ assert_that(
+ cargo.arg("package").arg("--list"),
+ execs().with_status(0).with_stdout(
+ "\
Cargo.toml
src/main.rs
-"));
+",
+ ),
+ );
}
#[test]
.file("a_dir/nested/src/main.rs", main_rs)
.build();
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] manifest has no documentation[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] nested v0.0.1 ({dir})
[COMPILING] nested v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
- assert_that(&p.root().join("target/package/nested-0.0.1.crate"), existing_file());
- assert_that(p.cargo("package").arg("-l"),
- execs().with_status(0).with_stdout("\
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ &p.root().join("target/package/nested-0.0.1.crate"),
+ existing_file(),
+ );
+ assert_that(
+ p.cargo("package").arg("-l"),
+ execs().with_status(0).with_stdout(
+ "\
Cargo.toml
src[..]main.rs
-"));
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stdout(""));
+",
+ ),
+ );
+ assert_that(p.cargo("package"), execs().with_status(0).with_stdout(""));
let f = File::open(&p.root().join("target/package/nested-0.0.1.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
let f = f.unwrap();
let fname = f.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"nested-0.0.1/Cargo.toml" ||
- fname == b"nested-0.0.1/Cargo.toml.orig" ||
- fname == b"nested-0.0.1/src/main.rs",
- "unexpected filename: {:?}", f.header().path())
+ assert!(
+ fname == b"nested-0.0.1/Cargo.toml" || fname == b"nested-0.0.1/Cargo.toml.orig"
+ || fname == b"nested-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ f.header().path()
+ )
}
}
#[test]
fn package_weird_characters() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.file("src/:foo", "")
.build();
- assert_that(p.cargo("package"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(101).with_stderr(
+ "\
warning: [..]
See [..]
[PACKAGING] foo [..]
Caused by:
cannot package a filename with a special character `:`: src/:foo
-"));
+",
+ ),
+ );
}
#[test]
fn repackage_on_source_change() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("package"),
- execs().with_status(0));
+ assert_that(p.cargo("package"), execs().with_status(0));
// Add another source file
let mut file = File::create(p.root().join("src").join("foo.rs")).unwrap_or_else(|e| {
- panic!("could not create file {}: {}", p.root().join("src/foo.rs").display(), e)
+ panic!(
+ "could not create file {}: {}",
+ p.root().join("src/foo.rs").display(),
+ e
+ )
});
- file.write_all(br#"
+ file.write_all(
+ br#"
fn main() { println!("foo"); }
- "#).unwrap();
+ "#,
+ ).unwrap();
std::mem::drop(file);
let mut pro = process(&cargo_exe());
pro.arg("package").cwd(p.root());
// Check that cargo rebuilds the tarball
- assert_that(pro, execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ pro,
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] [..]
See [..]
[PACKAGING] foo v0.0.1 ({dir})
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
// Check that the tarball contains the added file
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
let entries = ar.entries().unwrap();
- let entry_paths = entries.map(|entry| {
- entry.unwrap().path().unwrap().into_owned()
- }).collect::<Vec<PathBuf>>();
- assert_that(&entry_paths, contains(vec![PathBuf::from("foo-0.0.1/src/foo.rs")]));
+ let entry_paths = entries
+ .map(|entry| entry.unwrap().path().unwrap().into_owned())
+ .collect::<Vec<PathBuf>>();
+ assert_that(
+ &entry_paths,
+ contains(vec![PathBuf::from("foo-0.0.1/src/foo.rs")]),
+ );
}
#[test]
use std::os::unix::fs;
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = 'foo'
homepage = 'foo'
repository = 'foo'
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
t!(fs::symlink("nowhere", &p.root().join("src/foo.rs")));
- assert_that(p.cargo("package").arg("-v"),
- execs().with_status(101)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("package").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: failed to prepare local package for uploading
Caused by:
Caused by:
[..]
-"));
+",
+ ),
+ );
}
#[test]
// Create a Git repository containing a minimal Rust project.
let _ = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
// Modify Cargo.toml without committing the change.
- p.change_file("Cargo.toml", r#"
+ p.change_file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
homepage = "foo"
repository = "foo"
# change
- "#);
+ "#,
+ );
- assert_that(p.cargo("package"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(101).with_stderr(
+ "\
error: 1 files in the working directory contain changes that were not yet \
committed into git:
Cargo.toml
to proceed despite this, pass the `--allow-dirty` flag
-"));
+",
+ ),
+ );
}
#[test]
Package::new("ghi", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
def = { version = "1.0", registry = "alternative" }
ghi = "1.0"
abc = "1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("package")
- .masquerade_as_nightly_cargo()
- .arg("--no-verify"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("package")
+ .masquerade_as_nightly_cargo()
+ .arg("--no-verify"),
+ execs().with_status(0),
+ );
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
- let mut entry = ar.entries().unwrap()
- .map(|f| f.unwrap())
- .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
- .unwrap();
+ let mut entry = ar.entries()
+ .unwrap()
+ .map(|f| f.unwrap())
+ .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
+ .unwrap();
let mut contents = String::new();
entry.read_to_string(&mut contents).unwrap();
// BTreeMap makes the order of dependencies in the generated file deterministic
// by sorting alphabetically
- assert_eq!(&contents[..],
-&*format!(
-r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+ assert_eq!(
+ &contents[..],
+ &*format!(
+ r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
[dependencies.ghi]
version = "1.0"
"#,
- registry::alt_registry()));
+ registry::alt_registry()
+ )
+ );
}
#[test]
fn ignore_workspace_specifier() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = { path = "bar", version = "0.1" }
- "#)
+ "#,
+ )
.file("src/main.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
workspace = ".."
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("package").arg("--no-verify").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("package")
+ .arg("--no-verify")
+ .cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
- let mut entry = ar.entries().unwrap()
- .map(|f| f.unwrap())
- .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
- .unwrap();
+ let mut entry = ar.entries()
+ .unwrap()
+ .map(|f| f.unwrap())
+ .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
+ .unwrap();
let mut contents = String::new();
entry.read_to_string(&mut contents).unwrap();
- assert_eq!(&contents[..],
-r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+ assert_eq!(
+ &contents[..],
+ r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
name = "bar"
version = "0.1.0"
authors = []
-"#);
+"#
+ );
}
#[test]
Package::new("other", "1.0.0").publish();
Package::new("other1", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
other = "1.0"
other1 = { version = "1.0" }
- "#)
+ "#,
+ )
.file("src/main.rs", "")
.build();
- assert_that(p.cargo("package").arg("--no-verify"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("package").arg("--no-verify"),
+ execs().with_status(0),
+ );
}
#[test]
fn test_epoch() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["epoch"]
[package]
name = "foo"
version = "0.0.1"
authors = []
rust = "2018"
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("build").arg("-v")
- .masquerade_as_nightly_cargo(),
- execs()
+ assert_that(
+ p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
+ execs()
// -Zepoch is still in flux and we're not passing -Zunstable-options
// from Cargo so it will probably error. Only partially match the output
// until stuff stabilizes
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
-", dir = p.root().display(), url = p.url())));
+", dir = p.root().display(), url = p.url())),
+ );
}
#[test]
fn test_epoch_missing() {
// no epoch = 2015
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["epoch"]
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("build").arg("-v")
- .masquerade_as_nightly_cargo(),
- execs()
+ assert_that(
+ p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
+ execs()
// -Zepoch is still in flux and we're not passing -Zunstable-options
// from Cargo so it will probably error. Only partially match the output
// until stuff stabilizes
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
-", dir = p.root().display(), url = p.url())));
+", dir = p.root().display(), url = p.url())),
+ );
}
#[test]
fn test_epoch_malformed() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["epoch"]
[package]
name = "foo"
version = "0.0.1"
authors = []
rust = "chicken"
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("build").arg("-v")
- .masquerade_as_nightly_cargo(),
- execs()
- .with_status(101)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(format!(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
the `rust` key must be one of: `2015`, `2018`
-")));
+"
+ )),
+ );
}
-
#[test]
fn test_epoch_nightly() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
rust = "2015"
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("build").arg("-v")
- .masquerade_as_nightly_cargo(),
- execs()
- .with_status(101)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(format!(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
feature `epoch` is required
consider adding `cargo-features = [\"epoch\"]` to the manifest
-")));
+"
+ )),
+ );
}
#[test]
fn package_lockfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["publish-lockfile"]
[project]
license = "MIT"
description = "foo"
publish-lockfile = true
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("package").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("package").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] manifest has no documentation[..]
See [..]
[PACKAGING] foo v0.0.1 ({dir})
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
- assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file());
- assert_that(p.cargo("package").arg("-l").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stdout("\
+ dir = p.url()
+ )),
+ );
+ assert_that(
+ &p.root().join("target/package/foo-0.0.1.crate"),
+ existing_file(),
+ );
+ assert_that(
+ p.cargo("package").arg("-l").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stdout(
+ "\
Cargo.lock
Cargo.toml
src[/]main.rs
-"));
- assert_that(p.cargo("package").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stdout(""));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("package").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stdout(""),
+ );
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
let f = f.unwrap();
let fname = f.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"foo-0.0.1/Cargo.toml" ||
- fname == b"foo-0.0.1/Cargo.toml.orig" ||
- fname == b"foo-0.0.1/Cargo.lock" ||
- fname == b"foo-0.0.1/src/main.rs",
- "unexpected filename: {:?}", f.header().path())
+ assert!(
+ fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig"
+ || fname == b"foo-0.0.1/Cargo.lock"
+ || fname == b"foo-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ f.header().path()
+ )
}
}
// Create a Git repository containing a minimal Rust project.
let _ = git::repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["publish-lockfile"]
[project]
homepage = "foo"
repository = "foo"
publish-lockfile = true
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("package").arg("-l").masquerade_as_nightly_cargo(),
- execs().with_status(0).with_stdout("\
+ assert_that(
+ p.cargo("package").arg("-l").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stdout(
+ "\
Cargo.lock
Cargo.toml
src/main.rs
-"));
+",
+ ),
+ );
}
#[test]
fn no_lock_file_with_library() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["publish-lockfile"]
[project]
license = "MIT"
description = "foo"
publish-lockfile = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("package").masquerade_as_nightly_cargo(),
- execs().with_status(0));
+ assert_that(
+ p.cargo("package").masquerade_as_nightly_cargo(),
+ execs().with_status(0),
+ );
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
#[test]
fn lock_file_and_workspace() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
cargo-features = ["publish-lockfile"]
[package]
license = "MIT"
description = "foo"
publish-lockfile = true
- "#)
+ "#,
+ )
.file("foo/src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("package")
- .cwd(p.root().join("foo"))
- .masquerade_as_nightly_cargo(),
- execs().with_status(0));
+ assert_that(
+ p.cargo("package")
+ .cwd(p.root().join("foo"))
+ .masquerade_as_nightly_cargo(),
+ execs().with_status(0),
+ );
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
let mut rdr = GzDecoder::new(f);
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
- assert!(
- ar.entries().unwrap()
- .into_iter()
- .any(|f|{
- let f = f.unwrap();
- let fname = f.header().path().unwrap();
- fname.ends_with("Cargo.lock")
- })
- );
+ assert!(ar.entries().unwrap().into_iter().any(|f| {
+ let f = f.unwrap();
+ let fname = f.header().path().unwrap();
+ fname.ends_with("Cargo.lock")
+ }));
}
fn replace() {
Package::new("foo", "0.1.0").publish();
Package::new("deep-foo", "0.1.0")
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
extern crate foo;
pub fn deep() {
foo::foo();
}
- "#)
+ "#,
+ )
.dep("foo", "0.1.0")
.publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = "foo" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
extern crate deep_foo;
pub fn bar() {
foo::foo();
deep_foo::deep();
}
- ")
- .file("foo/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[DOWNLOADING] deep-foo v0.1.0 ([..])
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] deep-foo v0.1.0
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build"),//.env("RUST_LOG", "trace"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build"), //.env("RUST_LOG", "trace"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
Package::new("baz", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = "foo" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::foo();
}
- ")
- .file("foo/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
fn patch_git() {
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("bar")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.'{0}']
foo = {{ path = "foo" }}
- "#, foo.url()))
- .file("src/lib.rs", "
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::foo();
}
- ")
- .file("foo/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `file://[..]`
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
fn patch_to_git() {
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn foo() {}")
.build();
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = {{ git = '{}' }}
- "#, foo.url()))
- .file("src/lib.rs", "
+ "#,
+ foo.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate foo;
pub fn bar() {
foo::foo();
}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build"),//.env("RUST_LOG", "cargo=trace"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"), //.env("RUST_LOG", "cargo=trace"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `file://[..]`
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
not rust code
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[DOWNLOADING] foo v0.1.0 [..]
[COMPILING] foo v0.1.0
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
// unused patch should be in the lock file
let mut lock = String::new();
- File::open(p.root().join("Cargo.lock")).unwrap()
- .read_to_string(&mut lock).unwrap();
+ File::open(p.root().join("Cargo.lock"))
+ .unwrap()
+ .read_to_string(&mut lock)
+ .unwrap();
let toml: toml::Value = toml::from_str(&lock).unwrap();
assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1);
assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("foo"));
- assert_eq!(toml["patch"]["unused"][0]["version"].as_str(), Some("0.2.0"));
+ assert_eq!(
+ toml["patch"]["unused"][0]["version"].as_str(),
+ Some("0.2.0")
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let foo = git::repo(&paths::root().join("override"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("bar")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = {{ git = '{}' }}
- "#, foo.url()))
+ "#,
+ foo.url()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] git repository `file://[..]`
[UPDATING] registry `file://[..]`
[DOWNLOADING] foo v0.1.0 [..]
[COMPILING] foo v0.1.0
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[DOWNLOADING] foo v0.1.0 [..]
[COMPILING] foo v0.1.0
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
-
- t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
+
+ t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+ br#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#));
+ "#
+ ));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.1.0 (file://[..])
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.1"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[DOWNLOADING] foo v0.1.0 [..]
[COMPILING] foo v0.1.0
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
-
- t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
+
+ t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+ br#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#));
+ "#
+ ));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("[FINISHED] [..]"));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr("[FINISHED] [..]"),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.1"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.1.1 [..]
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("subdir/Cargo.toml", r#"
+ .file(
+ "subdir/Cargo.toml",
+ r#"
[package]
name = "subdir"
version = "0.1.0"
[dependencies]
foo = '0.1.0'
- "#)
+ "#,
+ )
.file("subdir/src/lib.rs", r#""#)
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.1"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.1.1 [..]
[COMPILING] subdir v0.1.0 [..]
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.2.0 [..]
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
Package::new("foo", "0.2.0").publish();
- assert_that(p.cargo("update"),
- execs().with_status(0));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(p.cargo("update"), execs().with_status(0));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
- t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
+ t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+ br#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "0.2.0"
- "#));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ "#
+ ));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[DOWNLOADING] foo v0.2.0 [..]
[COMPILING] foo v0.2.0
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("subdir/Cargo.toml", r#"
+ .file(
+ "subdir/Cargo.toml",
+ r#"
[package]
name = "subdir"
version = "0.1.0"
[dependencies]
foo = '0.2.0'
- "#)
+ "#,
+ )
.file("subdir/src/lib.rs", r#""#)
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.2.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `file://[..]`
[COMPILING] foo v0.2.0 [..]
[COMPILING] subdir v0.1.0 [..]
[COMPILING] bar v0.0.1 (file://[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
Package::new("bar", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
bar = { path = 'bar' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", r#""#)
.build();
// Generate a lock file where `bar` is unused
assert_that(p.cargo("build"), execs().with_status(0));
let mut lock_file1 = String::new();
- File::open(p.root().join("Cargo.lock")).unwrap()
- .read_to_string(&mut lock_file1).unwrap();
+ File::open(p.root().join("Cargo.lock"))
+ .unwrap()
+ .read_to_string(&mut lock_file1)
+ .unwrap();
// Remove `bar` and generate a new lock file form the old one
- File::create(p.root().join("Cargo.toml")).unwrap().write_all(r#"
+ File::create(p.root().join("Cargo.toml"))
+ .unwrap()
+ .write_all(
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = { path = 'foo' }
- "#.as_bytes()).unwrap();
+ "#.as_bytes(),
+ )
+ .unwrap();
assert_that(p.cargo("build"), execs().with_status(0));
let mut lock_file2 = String::new();
- File::open(p.root().join("Cargo.lock")).unwrap()
- .read_to_string(&mut lock_file2).unwrap();
+ File::open(p.root().join("Cargo.lock"))
+ .unwrap()
+ .read_to_string(&mut lock_file2)
+ .unwrap();
// Remove the lock file and build from scratch
fs::remove_file(p.root().join("Cargo.lock")).unwrap();
assert_that(p.cargo("build"), execs().with_status(0));
let mut lock_file3 = String::new();
- File::open(p.root().join("Cargo.lock")).unwrap()
- .read_to_string(&mut lock_file3).unwrap();
+ File::open(p.root().join("Cargo.lock"))
+ .unwrap()
+ .read_to_string(&mut lock_file3)
+ .unwrap();
assert!(lock_file1.contains("bar"));
assert_eq!(lock_file2, lock_file3);
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.some-other-source]
foo = { path = 'foo' }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
invalid url `some-other-source`: relative URL without a base
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[patch.crates-io]
foo = "0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] [..]
error: failed to resolve patches for `[..]`
Caused by:
patch for `foo` in `[..]` points to the same source, but patches must point \
to different sources
-"));
+",
+ ),
+ );
}
#[test]
Package::new("foo", "0.1.0").publish();
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar"]
[patch.crates-io]
foo = { path = "foo" }
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
[dependencies]
foo = "0.1"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
.publish();
let p = project("p")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "p"
authors = []
[patch.crates-io]
foo = { path = "foo" }
bar = { path = "bar" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.1"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", r#""#)
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.1"
[dependencies]
foo = "0.1"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", r#""#)
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
// Nothing should be rebuilt, no registry should be updated.
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
use cargotest;
use cargotest::sleep_ms;
use cargotest::support::paths::{self, CargoPathExt};
-use cargotest::support::{project, execs, main_file};
+use cargotest::support::{execs, main_file, project};
use cargotest::support::registry::Package;
use hamcrest::{assert_that, existing_file};
// Windows, for more info see #3466.
fn cargo_compile_with_nested_deps_shorthand() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
path = "bar"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
- "#)
- .file("bar/baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/baz/Cargo.toml",
+ r#"
[project]
name = "baz"
[lib]
name = "baz"
- "#)
- .file("bar/baz/src/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/baz/src/baz.rs",
+ r#"
pub fn gimme() -> String {
"test passed".to_string()
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/bar/baz)\n\
- [COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "[COMPILING] baz v0.5.0 ({}/bar/baz)\n\
+ [COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url(),
+ p.url()
+ )),
+ );
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("test passed\n").with_status(0));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_stdout("test passed\n").with_status(0),
+ );
println!("cleaning");
- assert_that(p.cargo("clean").arg("-v"),
- execs().with_stdout("").with_status(0));
+ assert_that(
+ p.cargo("clean").arg("-v"),
+ execs().with_stdout("").with_status(0),
+ );
println!("building baz");
- assert_that(p.cargo("build").arg("-p").arg("baz"),
- execs().with_status(0)
- .with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/bar/baz)\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url())));
+ assert_that(
+ p.cargo("build").arg("-p").arg("baz"),
+ execs().with_status(0).with_stderr(&format!(
+ "[COMPILING] baz v0.5.0 ({}/bar/baz)\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url()
+ )),
+ );
println!("building foo");
- assert_that(p.cargo("build")
- .arg("-p").arg("foo"),
- execs().with_status(0)
- .with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url())));
+ assert_that(
+ p.cargo("build").arg("-p").arg("foo"),
+ execs().with_status(0).with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url()
+ )),
+ );
}
#[test]
fn cargo_compile_with_root_dev_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.build();
let _p2 = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn gimme() -> &'static str {
"zoidberg"
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101))
+ assert_that(p.cargo("build"), execs().with_status(101))
}
#[test]
fn cargo_compile_with_root_dev_deps_with_testing() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.build();
let _p2 = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn gimme() -> &'static str {
"zoidberg"
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_stderr(
+ "\
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] [..] v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]")
- .with_stdout_contains("running 0 tests"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ )
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn cargo_compile_with_transitive_dev_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
path = "bar"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
pub fn gimme() -> &'static str {
"zoidberg"
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in \
- [..]\n",
- p.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n",
+ p.url(),
+ p.url()
+ )),
+ );
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("zoidberg\n"));
+ assert_that(process(&p.bin("foo")), execs().with_stdout("zoidberg\n"));
}
#[test]
fn no_rebuild_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::bar() }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
// First time around we should compile both foo and bar
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url()
+ )),
+ );
sleep_ms(1000);
- p.change_file("src/main.rs", r#"
+ p.change_file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::bar(); }
- "#);
+ "#,
+ );
// Don't compile bar, but do recompile foo.
- assert_that(p.cargo("build"),
- execs().with_stderr("\
- [COMPILING] foo v0.5.0 ([..])\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n"));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(
+ "\
+ [COMPILING] foo v0.5.0 ([..])\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ ),
+ );
}
#[test]
fn deep_dependencies_trigger_rebuild() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::bar() }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
name = "bar"
[dependencies.baz]
path = "../baz"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
extern crate baz;
pub fn bar() { baz::baz() }
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
[lib]
name = "baz"
- "#)
- .file("baz/src/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/baz.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\
- [COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url(),
- p.url())));
- assert_that(p.cargo("build"),
- execs().with_stdout(""));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] baz v0.5.0 ({}/baz)\n\
+ [COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url(),
+ p.url()
+ )),
+ );
+ assert_that(p.cargo("build"), execs().with_stdout(""));
// Make sure an update to baz triggers a rebuild of bar
//
// We base recompilation off mtime, so sleep for at least a second to ensure
// that this write will change the mtime.
sleep_ms(1000);
- File::create(&p.root().join("baz/src/baz.rs")).unwrap().write_all(br#"
+ File::create(&p.root().join("baz/src/baz.rs"))
+ .unwrap()
+ .write_all(
+ br#"
pub fn baz() { println!("hello!"); }
- "#).unwrap();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\
- [COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url(),
- p.url())));
+ "#,
+ )
+ .unwrap();
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] baz v0.5.0 ({}/baz)\n\
+ [COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url(),
+ p.url()
+ )),
+ );
// Make sure an update to bar doesn't trigger baz
sleep_ms(1000);
- File::create(&p.root().join("bar/src/bar.rs")).unwrap().write_all(br#"
+ File::create(&p.root().join("bar/src/bar.rs"))
+ .unwrap()
+ .write_all(
+ br#"
extern crate baz;
pub fn bar() { println!("hello!"); baz::baz(); }
- "#).unwrap();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url())));
-
+ "#,
+ )
+ .unwrap();
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url()
+ )),
+ );
}
#[test]
fn no_rebuild_two_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
path = "bar"
[dependencies.baz]
path = "baz"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::bar() }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
name = "bar"
[dependencies.baz]
path = "../baz"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
pub fn bar() {}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
[lib]
name = "baz"
- "#)
- .file("baz/src/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/baz.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] baz v0.5.0 ({}/baz)\n\
- [COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url(),
- p.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] baz v0.5.0 ({}/baz)\n\
+ [COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url(),
+ p.url(),
+ p.url()
+ )),
+ );
assert_that(&p.bin("foo"), existing_file());
- assert_that(p.cargo("build"),
- execs().with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_stdout(""));
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn nested_deps_recompile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
path = "src/bar"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("src/bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "src/bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
- "#)
+ "#,
+ )
.file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }")
.build();
let bar = p.url();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/src/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- bar,
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/src/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ bar,
+ p.url()
+ )),
+ );
sleep_ms(1000);
- File::create(&p.root().join("src/main.rs")).unwrap().write_all(br#"
+ File::create(&p.root().join("src/main.rs"))
+ .unwrap()
+ .write_all(
+ br#"
fn main() {}
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
// This shouldn't recompile `bar`
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url()
+ )),
+ );
}
#[test]
fn error_message_for_missing_manifest() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
[dependencies.bar]
path = "src/bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bar/not-a-manifest", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to load source for a dependency on `bar`
Caused by:
Caused by:
[..] (os error [..])
-"));
-
+",
+ ),
+ );
}
#[test]
fn override_relative() {
let bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
- .file("src/lib.rs", "")
+ "#,
+ )
+ .file("src/lib.rs", "")
.build();
fs::create_dir(&paths::root().join(".cargo")).unwrap();
- File::create(&paths::root().join(".cargo/config")).unwrap()
- .write_all(br#"paths = ["bar"]"#).unwrap();
+ File::create(&paths::root().join(".cargo/config"))
+ .unwrap()
+ .write_all(br#"paths = ["bar"]"#)
+ .unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
[dependencies.bar]
path = '{}'
- "#, bar.root().display()))
- .file("src/lib.rs", "")
- .build();
+ "#,
+ bar.root().display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
-
}
#[test]
fn override_self() {
let bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("foo");
let root = p.root().clone();
- let p = p
- .file(".cargo/config", &format!(r#"
+ let p = p.file(
+ ".cargo/config",
+ &format!(
+ r#"
paths = ['{}']
- "#, root.display()))
- .file("Cargo.toml", &format!(r#"
+ "#,
+ root.display()
+ ),
+ ).file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
[dependencies.bar]
path = '{}'
- "#, bar.root().display()))
- .file("src/lib.rs", "")
- .file("src/main.rs", "fn main() {}")
- .build();
+ "#,
+ bar.root().display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
+ .build();
assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn override_path_dep() {
let bar = project("bar")
- .file("p1/Cargo.toml", r#"
+ .file(
+ "p1/Cargo.toml",
+ r#"
[package]
name = "p1"
version = "0.5.0"
[dependencies.p2]
path = "../p2"
- "#)
- .file("p1/src/lib.rs", "")
- .file("p2/Cargo.toml", r#"
+ "#,
+ )
+ .file("p1/src/lib.rs", "")
+ .file(
+ "p2/Cargo.toml",
+ r#"
[package]
name = "p2"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("p2/src/lib.rs", "")
.build();
let p = project("foo")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
paths = ['{}', '{}']
- "#, bar.root().join("p1").display(),
- bar.root().join("p2").display()))
- .file("Cargo.toml", &format!(r#"
+ "#,
+ bar.root().join("p1").display(),
+ bar.root().join("p2").display()
+ ),
+ )
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "foo"
[dependencies.p2]
path = '{}'
- "#, bar.root().join("p2").display()))
- .file("src/lib.rs", "")
- .build();
-
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ "#,
+ bar.root().join("p2").display()
+ ),
+ )
+ .file("src/lib.rs", "")
+ .build();
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn path_dep_build_cmd() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
path = "bar"
- "#)
- .file("src/main.rs",
- &main_file(r#""{}", bar::gimme()"#, &["bar"]))
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
[lib]
name = "bar"
path = "src/bar.rs"
- "#)
- .file("bar/build.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
use std::fs;
fn main() {
fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
}
- "#)
- .file("bar/src/bar.rs.in", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs.in",
+ r#"
pub fn gimme() -> i32 { 0 }
- "#).build();
+ "#,
+ )
+ .build();
p.root().join("bar").move_into_the_past();
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in \
- [..]\n",
- p.url(),
- p.url())));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n",
+ p.url(),
+ p.url()
+ )),
+ );
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("0\n"));
+ assert_that(process(&p.bin("foo")), execs().with_stdout("0\n"));
// Touching bar.rs.in should cause the `build` command to run again.
{
let file = fs::File::create(&p.root().join("bar/src/bar.rs.in"));
- file.unwrap().write_all(br#"pub fn gimme() -> i32 { 1 }"#).unwrap();
+ file.unwrap()
+ .write_all(br#"pub fn gimme() -> i32 { 1 }"#)
+ .unwrap();
}
- assert_that(p.cargo("build"),
- execs().with_stderr(&format!("[COMPILING] bar v0.5.0 ({}/bar)\n\
- [COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) in \
- [..]\n",
- p.url(),
- p.url())));
-
- assert_that(process(&p.bin("foo")),
- execs().with_stdout("1\n"));
+ assert_that(
+ p.cargo("build"),
+ execs().with_stderr(&format!(
+ "[COMPILING] bar v0.5.0 ({}/bar)\n\
+ [COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+ [..]\n",
+ p.url(),
+ p.url()
+ )),
+ );
+
+ assert_that(process(&p.bin("foo")), execs().with_stdout("1\n"));
}
#[test]
fn dev_deps_no_rebuild_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[lib]
name = "foo"
doctest = false
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(test)] #[allow(unused_extern_crates)] extern crate bar;
#[cfg(not(test))] pub fn foo() { env!("FOO"); }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
.build();
- assert_that(p.cargo("build")
- .env("FOO", "bar"),
- execs().with_status(0)
- .with_stderr(&format!("[COMPILING] foo v0.5.0 ({})\n\
- [FINISHED] dev [unoptimized + debuginfo] target(s) \
- in [..]\n",
- p.url())));
-
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").env("FOO", "bar"),
+ execs().with_status(0).with_stderr(&format!(
+ "[COMPILING] foo v0.5.0 ({})\n\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) \
+ in [..]\n",
+ p.url()
+ )),
+ );
+
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] [..] v0.5.0 ({url}[..])
[COMPILING] [..] v0.5.0 ({url}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", url = p.url()))
- .with_stdout_contains("running 0 tests"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ url = p.url()
+ ))
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn custom_target_no_rebuild() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
a = { path = "a" }
[workspace]
members = ["a", "b"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
authors = []
[dependencies]
a = { path = "../a" }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.5.0 ([..])
[COMPILING] foo v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- t!(fs::rename(p.root().join("target"), p.root().join("target_moved")));
- assert_that(p.cargo("build")
- .arg("--manifest-path=b/Cargo.toml")
- .env("CARGO_TARGET_DIR", "target_moved"),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+
+ t!(fs::rename(
+ p.root().join("target"),
+ p.root().join("target_moved")
+ ));
+ assert_that(
+ p.cargo("build")
+ .arg("--manifest-path=b/Cargo.toml")
+ .env("CARGO_TARGET_DIR", "target_moved"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] b v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn override_and_depend() {
let p = project("foo")
- .file("a/a1/Cargo.toml", r#"
+ .file(
+ "a/a1/Cargo.toml",
+ r#"
[project]
name = "a1"
version = "0.5.0"
authors = []
[dependencies]
a2 = { path = "../a2" }
- "#)
+ "#,
+ )
.file("a/a1/src/lib.rs", "")
- .file("a/a2/Cargo.toml", r#"
+ .file(
+ "a/a2/Cargo.toml",
+ r#"
[project]
name = "a2"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("a/a2/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.5.0"
[dependencies]
a1 = { path = "../a/a1" }
a2 = { path = "../a/a2" }
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
- .file("b/.cargo/config", r#"
+ .file(
+ "b/.cargo/config",
+ r#"
paths = ["../a"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").cwd(p.root().join("b")),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("b")),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a2 v0.5.0 ([..])
[COMPILING] a1 v0.5.0 ([..])
[COMPILING] b v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn missing_path_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
paths = ["../whoa-this-does-not-exist"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \
(defined in `[..]`)
Caused by:
[..] (os error [..])
-"));
+",
+ ),
+ );
}
#[test]
Package::new("bar", "1.0.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "top"
version = "0.5.0"
[dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Change the dependency on `bar` to an invalid path
- File::create(&p.root().join("foo/Cargo.toml")).unwrap().write_all(br#"
+ File::create(&p.root().join("foo/Cargo.toml"))
+ .unwrap()
+ .write_all(
+ br#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = { path = "" }
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
// Make sure we get a nice error. In the past this actually stack
// overflowed!
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: no matching package named `bar` found
location searched: [..]
required by package `foo v0.5.0 ([..])`
-"));
+",
+ ),
+ );
}
#[test]
fn workspace_produces_rlib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "top"
version = "0.5.0"
[dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.build();
assert_that(&p.root().join("target/debug/libtop.rlib"), existing_file());
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
-
}
#[test]
fn thin_lto_works() {
if !cargotest::is_nightly() {
- return
+ return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "top"
version = "0.5.0"
[profile.release]
lto = 'thin'
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--release").arg("-v"),
- execs().with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--release").arg("-v"),
+ execs().with_stderr(
+ "\
[COMPILING] top [..]
[RUNNING] `rustc [..] -C lto=thin [..]`
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
use std::env;
use cargotest::{is_nightly, rustc_host};
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn plugin_to_the_max() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(plugin)]
#![plugin(bar)]
extern crate foo_lib;
fn main() { foo_lib::foo(); }
- "#)
- .file("src/foo_lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/foo_lib.rs",
+ r#"
#![feature(plugin)]
#![plugin(bar)]
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.baz]
path = "../baz"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(plugin_registrar, rustc_private)]
extern crate rustc_plugin;
pub fn foo(_reg: &mut Registry) {
println!("{}", baz::baz());
}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.0.1"
[lib]
name = "baz"
crate_type = ["dylib"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
.build();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
- assert_that(foo.cargo("doc"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
+ assert_that(foo.cargo("doc"), execs().with_status(0));
}
#[test]
fn plugin_with_dynamic_native_dependency() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
let workspace = project("ws")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["builder", "foo"]
- "#)
+ "#,
+ )
.build();
let build = project("ws/builder")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "builder"
version = "0.0.1"
[lib]
name = "builder"
crate-type = ["dylib"]
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[no_mangle]
pub extern fn foo() {}
- "#)
+ "#,
+ )
.build();
let foo = project("ws/foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(plugin)]
#![plugin(bar)]
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
plugin = true
- "#)
- .file("bar/build.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/build.rs",
+ r#"
use std::path::PathBuf;
use std::env;
let src = PathBuf::from(env::var("SRC").unwrap());
println!("cargo:rustc-flags=-L {}/deps", src.parent().unwrap().display());
}
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#![feature(plugin_registrar, rustc_private)]
extern crate rustc_plugin;
pub fn bar(_reg: &mut Registry) {
unsafe { foo() }
}
- "#)
+ "#,
+ )
.build();
- assert_that(build.cargo("build"),
- execs().with_status(0));
+ assert_that(build.cargo("build"), execs().with_status(0));
let src = workspace.root().join("target/debug");
- let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| {
- let lib = lib.file_name().unwrap().to_str().unwrap();
- lib.starts_with(env::consts::DLL_PREFIX) &&
- lib.ends_with(env::consts::DLL_SUFFIX)
- }).unwrap();
-
- assert_that(foo.cargo("build").env("SRC", &lib).arg("-v"),
- execs().with_status(0));
+ let lib = fs::read_dir(&src)
+ .unwrap()
+ .map(|s| s.unwrap().path())
+ .find(|lib| {
+ let lib = lib.file_name().unwrap().to_str().unwrap();
+ lib.starts_with(env::consts::DLL_PREFIX) && lib.ends_with(env::consts::DLL_SUFFIX)
+ })
+ .unwrap();
+
+ assert_that(
+ foo.cargo("build").env("SRC", &lib).arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn plugin_integration() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
name = "foo"
plugin = true
doctest = false
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "")
.file("tests/it_works.rs", "")
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
#[test]
fn doctest_a_plugin() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = { path = "bar" }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[macro_use]
extern crate bar;
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
plugin = true
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn bar() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
// See #1515
let target = rustc_host();
let _foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
plugin = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.foo]
path = "../foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}]
ar = "nonexistent-ar"
linker = "nonexistent-linker"
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(bar.cargo("build").arg("--verbose"),
- execs().with_stderr_contains("\
+ assert_that(
+ bar.cargo("build").arg("--verbose"),
+ execs().with_stderr_contains(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]`
[ERROR] [..]linker[..]
-"));
+",
+ ),
+ );
}
#[test]
fn panic_abort_plugins() {
if !is_nightly() {
- return
+ return;
}
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = { path = "foo" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
plugin = true
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
#![feature(rustc_private)]
extern crate syntax;
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn shared_panic_abort_plugins() {
if !is_nightly() {
- return
+ return;
}
let p = project("top")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "top"
version = "0.0.1"
[dependencies]
foo = { path = "foo" }
bar = { path = "bar" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate bar;
- ")
- .file("foo/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = { path = "../bar" }
- "#)
- .file("foo/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ r#"
#![feature(rustc_private)]
extern crate syntax;
extern crate bar;
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
use cargotest::is_nightly;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
}
let client = project("client")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "client"
version = "0.0.1"
[target.'cfg(not(stage300))'.dependencies.noop]
path = "../noop"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(proc_macro)]
#[macro_use]
struct X;
fn main() {}
- "#)
+ "#,
+ )
.build();
let _noop = project("noop")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "noop"
version = "0.0.1"
[lib]
proc-macro = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(proc_macro, proc_macro_lib)]
extern crate proc_macro;
pub fn noop(_input: TokenStream) -> TokenStream {
"".parse().unwrap()
}
- "#)
+ "#,
+ )
.build();
- assert_that(client.cargo("build"),
- execs().with_status(0));
+ assert_that(client.cargo("build"), execs().with_status(0));
}
#[test]
}
let client = project("client")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "client"
version = "0.0.1"
[dependencies.noop]
path = "../noop"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(proc_macro)]
#[macro_use]
struct X;
fn main() {}
- "#)
+ "#,
+ )
.build();
let _noop = project("noop")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "noop"
version = "0.0.1"
[lib]
proc-macro = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(proc_macro, proc_macro_lib)]
extern crate proc_macro;
pub fn noop(_input: TokenStream) -> TokenStream {
"".parse().unwrap()
}
- "#)
+ "#,
+ )
.build();
- assert_that(client.cargo("build"),
- execs().with_status(0));
- assert_that(client.cargo("build"),
- execs().with_status(0));
+ assert_that(client.cargo("build"), execs().with_status(0));
+ assert_that(client.cargo("build"), execs().with_status(0));
}
#[test]
}
let client = project("client")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "client"
version = "0.0.1"
[dependencies.transmogrify]
path = "../transmogrify"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#![feature(proc_macro)]
#[macro_use]
assert!(x.impl_by_transmogrify());
println!("{:?}", x);
}
- "#)
+ "#,
+ )
.build();
let _transmogrify = project("transmogrify")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "transmogrify"
version = "0.0.1"
[lib]
proc-macro = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(proc_macro, proc_macro_lib)]
extern crate proc_macro;
}
".parse().unwrap()
}
- "#)
+ "#,
+ )
.build();
- assert_that(client.cargo("build"),
- execs().with_status(0));
- assert_that(client.cargo("run"),
- execs().with_status(0).with_stdout("X { success: true }"));
+ assert_that(client.cargo("build"), execs().with_status(0));
+ assert_that(
+ client.cargo("run"),
+ execs().with_status(0).with_stdout("X { success: true }"),
+ );
}
#[test]
}
let questionable = project("questionable")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "questionable"
version = "0.0.1"
[lib]
plugin = true
proc-macro = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(plugin_registrar, rustc_private)]
#![feature(proc_macro, proc_macro_lib)]
pub fn questionable(input: TokenStream) -> TokenStream {
input
}
- "#)
+ "#,
+ )
.build();
let msg = " lib.plugin and lib.proc-macro cannot both be true";
- assert_that(questionable.cargo("build"),
- execs().with_status(101).with_stderr_contains(msg));
+ assert_that(
+ questionable.cargo("build"),
+ execs().with_status(101).with_stderr_contains(msg),
+ );
}
#[test]
fn proc_macro_doctest() {
if !is_nightly() {
- return
+ return;
}
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[lib]
proc-macro = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#![feature(proc_macro, proc_macro_lib)]
#![crate_type = "proc-macro"]
fn a() {
assert!(true);
}
-"#)
+"#,
+ )
.build();
- assert_that(foo.cargo("test"),
- execs().with_status(0)
- .with_stdout_contains("test a ... ok")
- .with_stdout_contains_n("test [..] ... ok", 2));
+ assert_that(
+ foo.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stdout_contains("test a ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 2),
+ );
}
use std::env;
use cargotest::is_nightly;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn profile_overrides() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
opt-level = 1
debug = false
rpath = true
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [optimized] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url(),
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ )),
+ );
}
#[test]
fn opt_level_override_0() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
[profile.dev]
opt-level = 0
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] [..] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url()
-)));
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ );
}
#[test]
fn debug_override_1() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
version = "0.0.0"
[profile.dev]
debug = 1
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] [..] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url()
-)));
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ );
}
fn check_opt_level_override(profile_level: &str, rustc_level: &str) {
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "test"
[profile.dev]
opt-level = {level}
- "#, level = profile_level))
+ "#,
+ level = profile_level
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] [..] target(s) in [..]
",
-dir = p.root().display(),
-url = p.url(),
-level = rustc_level
-)));
+ dir = p.root().display(),
+ url = p.url(),
+ level = rustc_level
+ )),
+ );
}
#[test]
fn opt_level_overrides() {
- if !is_nightly() { return }
+ if !is_nightly() {
+ return;
+ }
for &(profile_level, rustc_level) in &[
("1", "1"),
#[test]
fn top_level_overrides_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "test"
[dependencies.foo]
path = "foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
[lib]
name = "foo"
crate_type = ["dylib", "rlib"]
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v").arg("--release"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build").arg("-v").arg("--release"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.0 ({url}/foo)
[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \
--crate-type dylib --crate-type rlib \
--extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib`
[FINISHED] release [optimized + debuginfo] target(s) in [..]
",
- dir = p.root().display(),
- url = p.url(),
- prefix = env::consts::DLL_PREFIX,
- suffix = env::consts::DLL_SUFFIX)));
+ dir = p.root().display(),
+ url = p.url(),
+ prefix = env::consts::DLL_PREFIX,
+ suffix = env::consts::DLL_SUFFIX
+ )),
+ );
}
#[test]
fn profile_in_non_root_manifest_triggers_a_warning() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[profile.dev]
debug = false
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[profile.dev]
opt-level = 1
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").cwd(p.root().join("bar")).arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")).arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root:
package: [..]
workspace: [..]
[COMPILING] bar v0.1.0 ([..])
[RUNNING] `rustc [..]`
-[FINISHED] dev [unoptimized] target(s) in [..]"));
+[FINISHED] dev [unoptimized] target(s) in [..]",
+ ),
+ );
}
#[test]
fn profile_in_virtual_manifest_works() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar"]
[profile.dev]
opt-level = 1
debug = false
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = ".."
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").cwd(p.root().join("bar")).arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")).arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] bar v0.1.0 ([..])
[RUNNING] `rustc [..]`
-[FINISHED] dev [optimized] target(s) in [..]"));
+[FINISHED] dev [optimized] target(s) in [..]",
+ ),
+ );
}
use cargotest::ChannelChanger;
use cargotest::support::git::repo;
use cargotest::support::paths;
-use cargotest::support::{project, execs, publish};
+use cargotest::support::{execs, project, publish};
use flate2::read::GzDecoder;
use hamcrest::assert_that;
use tar::Archive;
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
license = "MIT"
description = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").arg("--no-verify")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--no-verify")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[WARNING] manifest has no documentation, [..]
See [..]
[PACKAGING] foo v0.0.1 ({dir})
[UPLOADING] foo v0.0.1 ({dir})
",
- dir = p.url(),
- reg = publish::registry())));
+ dir = p.url(),
+ reg = publish::registry()
+ )),
+ );
let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
// Skip the metadata payload and the size of the tarball
let mut sz = [0; 4];
assert_eq!(f.read(&mut sz).unwrap(), 4);
- let sz = ((sz[0] as u32) << 0) |
- ((sz[1] as u32) << 8) |
- ((sz[2] as u32) << 16) |
- ((sz[3] as u32) << 24);
+ let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
+ | ((sz[3] as u32) << 24);
f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
// Verify the tarball
let mut rdr = GzDecoder::new(f);
- assert_eq!(rdr.header().unwrap().filename().unwrap(), b"foo-0.0.1.crate");
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ b"foo-0.0.1.crate"
+ );
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
let file = file.unwrap();
let fname = file.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"foo-0.0.1/Cargo.toml" ||
- fname == b"foo-0.0.1/Cargo.toml.orig" ||
- fname == b"foo-0.0.1/src/main.rs",
- "unexpected filename: {:?}", file.header().path());
+ assert!(
+ fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig"
+ || fname == b"foo-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ file.header().path()
+ );
}
}
let credentials = paths::root().join("home/.cargo/credentials");
File::create(credentials)
.unwrap()
- .write_all(br#"
+ .write_all(
+ br#"
token = "api-token"
- "#)
+ "#,
+ )
.unwrap();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
license = "MIT"
description = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").arg("--no-verify")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--no-verify")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[WARNING] manifest has no documentation, [..]
See [..]
[PACKAGING] foo v0.0.1 ({dir})
[UPLOADING] foo v0.0.1 ({dir})
",
- dir = p.url(),
- reg = publish::registry())));
+ dir = p.url(),
+ reg = publish::registry()
+ )),
+ );
let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
// Skip the metadata payload and the size of the tarball
let mut sz = [0; 4];
assert_eq!(f.read(&mut sz).unwrap(), 4);
- let sz = ((sz[0] as u32) << 0) |
- ((sz[1] as u32) << 8) |
- ((sz[2] as u32) << 16) |
- ((sz[3] as u32) << 24);
+ let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
+ | ((sz[3] as u32) << 24);
f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
// Verify the tarball
let mut rdr = GzDecoder::new(f);
- assert_eq!(rdr.header().unwrap().filename().unwrap(), b"foo-0.0.1.crate");
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ b"foo-0.0.1.crate"
+ );
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
let file = file.unwrap();
let fname = file.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"foo-0.0.1/Cargo.toml" ||
- fname == b"foo-0.0.1/Cargo.toml.orig" ||
- fname == b"foo-0.0.1/src/main.rs",
- "unexpected filename: {:?}", file.header().path());
+ assert!(
+ fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig"
+ || fname == b"foo-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ file.header().path()
+ );
}
}
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
license = "MIT"
description = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").arg("--no-verify")
- .arg("--host").arg(publish::registry().to_string()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--no-verify")
+ .arg("--host")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] The flag '--host' is no longer valid.
Previous versions of Cargo accepted this flag, but it is being
[PACKAGING] foo v0.0.1 ({dir})
[UPLOADING] foo v0.0.1 ({dir})
",
- dir = p.url(),
- reg = publish::registry())));
+ dir = p.url(),
+ reg = publish::registry()
+ )),
+ );
let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
// Skip the metadata payload and the size of the tarball
let mut sz = [0; 4];
assert_eq!(f.read(&mut sz).unwrap(), 4);
- let sz = ((sz[0] as u32) << 0) |
- ((sz[1] as u32) << 8) |
- ((sz[2] as u32) << 16) |
- ((sz[3] as u32) << 24);
+ let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
+ | ((sz[3] as u32) << 24);
f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
// Verify the tarball
let mut rdr = GzDecoder::new(f);
- assert_eq!(rdr.header().unwrap().filename().unwrap(), "foo-0.0.1.crate".as_bytes());
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ "foo-0.0.1.crate".as_bytes()
+ );
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
let file = file.unwrap();
let fname = file.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"foo-0.0.1/Cargo.toml" ||
- fname == b"foo-0.0.1/Cargo.toml.orig" ||
- fname == b"foo-0.0.1/src/main.rs",
- "unexpected filename: {:?}", file.header().path());
+ assert!(
+ fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig"
+ || fname == b"foo-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ file.header().path()
+ );
}
}
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
license = "MIT"
description = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").arg("--no-verify")
- .arg("--index").arg(publish::registry().to_string())
- .arg("--host").arg(publish::registry().to_string()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--no-verify")
+ .arg("--index")
+ .arg(publish::registry().to_string())
+ .arg("--host")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[WARNING] The flag '--host' is no longer valid.
Previous versions of Cargo accepted this flag, but it is being
[PACKAGING] foo v0.0.1 ({dir})
[UPLOADING] foo v0.0.1 ({dir})
",
- dir = p.url(),
- reg = publish::registry())));
+ dir = p.url(),
+ reg = publish::registry()
+ )),
+ );
let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
// Skip the metadata payload and the size of the tarball
let mut sz = [0; 4];
assert_eq!(f.read(&mut sz).unwrap(), 4);
- let sz = ((sz[0] as u32) << 0) |
- ((sz[1] as u32) << 8) |
- ((sz[2] as u32) << 16) |
- ((sz[3] as u32) << 24);
+ let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
+ | ((sz[3] as u32) << 24);
f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
// Verify the tarball
let mut rdr = GzDecoder::new(f);
- assert_eq!(rdr.header().unwrap().filename().unwrap(), "foo-0.0.1.crate".as_bytes());
+ assert_eq!(
+ rdr.header().unwrap().filename().unwrap(),
+ "foo-0.0.1.crate".as_bytes()
+ );
let mut contents = Vec::new();
rdr.read_to_end(&mut contents).unwrap();
let mut ar = Archive::new(&contents[..]);
let file = file.unwrap();
let fname = file.header().path_bytes();
let fname = &*fname;
- assert!(fname == b"foo-0.0.1/Cargo.toml" ||
- fname == b"foo-0.0.1/Cargo.toml.orig" ||
- fname == b"foo-0.0.1/src/main.rs",
- "unexpected filename: {:?}", file.header().path());
+ assert!(
+ fname == b"foo-0.0.1/Cargo.toml" || fname == b"foo-0.0.1/Cargo.toml.orig"
+ || fname == b"foo-0.0.1/src/main.rs",
+ "unexpected filename: {:?}",
+ file.header().path()
+ );
}
}
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.foo]
git = "git://path/to/nowhere"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").arg("-v").arg("--no-verify")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .arg("-v")
+ .arg("--no-verify")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[ERROR] crates cannot be published to crates.io with dependencies sourced from \
a repository\neither publish `foo` as its own crate on crates.io and \
specify a crates.io version as a dependency or pull it into this \
repository and specify it with a path and version\n\
(crate `foo` has repository path `git://path/to/nowhere`)\
-"));
+",
+ ),
+ );
}
#[test]
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[ERROR] all path dependencies must have a version specified when publishing.
dependency `bar` does not specify a version
-"));
+",
+ ),
+ );
}
#[test]
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
license = "MIT"
description = "foo"
publish = false
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] some crates cannot be published.
`foo` is marked as unpublishable
-"));
+",
+ ),
+ );
}
#[test]
fn dont_publish_dirty() {
publish::setup();
- let p = project("foo")
- .file("bar", "")
- .build();
+ let p = project("foo").file("bar", "").build();
let _ = repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry `[..]`
error: 1 files in the working directory contain changes that were not yet \
committed into git:
bar
to proceed despite this, pass the `--allow-dirty` flag
-"));
+",
+ ),
+ );
}
#[test]
let p = project("foo").build();
let _ = repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0));
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0),
+ );
}
#[test]
fn publish_in_sub_repo() {
publish::setup();
- let p = project("foo")
- .file("baz", "")
- .build();
+ let p = project("foo").file("baz", "").build();
let _ = repo(&paths::root().join("foo"))
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").cwd(p.root().join("bar"))
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0));
+ assert_that(
+ p.cargo("publish")
+ .cwd(p.root().join("bar"))
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0),
+ );
}
#[test]
fn publish_when_ignored() {
publish::setup();
- let p = project("foo")
- .file("baz", "")
- .build();
+ let p = project("foo").file("baz", "").build();
let _ = repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file(".gitignore", "baz")
.build();
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0));
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0),
+ );
}
#[test]
fn ignore_when_crate_ignored() {
publish::setup();
- let p = project("foo")
- .file("bar/baz", "")
- .build();
+ let p = project("foo").file("bar/baz", "").build();
let _ = repo(&paths::root().join("foo"))
.file(".gitignore", "bar")
- .nocommit_file("bar/Cargo.toml", r#"
+ .nocommit_file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.nocommit_file("bar/src/main.rs", "fn main() {}");
- assert_that(p.cargo("publish").cwd(p.root().join("bar"))
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0));
+ assert_that(
+ p.cargo("publish")
+ .cwd(p.root().join("bar"))
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0),
+ );
}
#[test]
fn new_crate_rejected() {
publish::setup();
- let p = project("foo")
- .file("baz", "")
- .build();
+ let p = project("foo").file("baz", "").build();
let _ = repo(&paths::root().join("foo"))
- .nocommit_file("Cargo.toml", r#"
+ .nocommit_file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
documentation = "foo"
homepage = "foo"
repository = "foo"
- "#)
+ "#,
+ )
.nocommit_file("src/main.rs", "fn main() {}");
- assert_that(p.cargo("publish")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(101));
+ assert_that(
+ p.cargo("publish")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(101),
+ );
}
#[test]
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
license = "MIT"
description = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").arg("--dry-run")
- .arg("--index").arg(publish::registry().to_string()),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("publish")
+ .arg("--dry-run")
+ .arg("--index")
+ .arg(publish::registry().to_string()),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[WARNING] manifest has no documentation, [..]
See [..]
[UPLOADING] foo v0.0.1 ({dir})
[WARNING] aborting upload due to dry run
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
// Ensure the API request wasn't actually made
assert!(!publish::upload_path().join("api/v1/crates/new").exists());
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
publish = [
"test"
]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
feature `alternative-registries` is required
consider adding `cargo-features = [\"alternative-registries\"]` to the manifest
-"));
+",
+ ),
+ );
}
#[test]
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
publish = [
"test"
]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] some crates cannot be published.
`foo` is marked as unpublishable
-"));
+",
+ ),
+ );
}
#[test]
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
license = "MIT"
description = "foo"
publish = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] some crates cannot be published.
`foo` is marked as unpublishable
-"));
+",
+ ),
+ );
}
#[test]
let p = project("foo").build();
let _ = repo(&paths::root().join("foo"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
documentation = "foo"
homepage = "foo"
publish = ["alternative"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative").arg("-Zunstable-options"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(0),
+ );
}
#[test]
publish::setup();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["alternative-registries"]
[project]
license = "MIT"
description = "foo"
publish = []
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("publish").masquerade_as_nightly_cargo()
- .arg("--registry").arg("alternative")
- .arg("-Zunstable-options"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("publish")
+ .masquerade_as_nightly_cargo()
+ .arg("--registry")
+ .arg("alternative")
+ .arg("-Zunstable-options"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] some crates cannot be published.
`foo` is marked as unpublishable
-"));
+",
+ ),
+ );
}
-use cargotest::support::{project, execs, main_file, basic_bin_manifest};
-use hamcrest::{assert_that};
+use cargotest::support::{basic_bin_manifest, execs, main_file, project};
+use hamcrest::assert_that;
static MANIFEST_OUTPUT: &'static str = r#"
{
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("read-manifest")
- .arg("--manifest-path").arg("foo/Cargo.toml")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0)
- .with_json(MANIFEST_OUTPUT));
+ assert_that(
+ p.cargo("read-manifest")
+ .arg("--manifest-path")
+ .arg("foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(0).with_json(MANIFEST_OUTPUT),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("read-manifest")
- .arg("--manifest-path").arg(p.root().join("Cargo.toml"))
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0)
- .with_json(MANIFEST_OUTPUT));
+ assert_that(
+ p.cargo("read-manifest")
+ .arg("--manifest-path")
+ .arg(p.root().join("Cargo.toml"))
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(0).with_json(MANIFEST_OUTPUT),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("read-manifest")
- .arg("--manifest-path").arg("foo")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(101)
- .with_stderr("[ERROR] the manifest-path must be \
- a path to a Cargo.toml file"));
+ assert_that(
+ p.cargo("read-manifest")
+ .arg("--manifest-path")
+ .arg("foo")
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(101).with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ ),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("read-manifest")
- .arg("--manifest-path").arg(p.root())
- .cwd(p.root().parent().unwrap()),
- execs().with_status(101)
- .with_stderr("[ERROR] the manifest-path must be \
- a path to a Cargo.toml file"));
+ assert_that(
+ p.cargo("read-manifest")
+ .arg("--manifest-path")
+ .arg(p.root())
+ .cwd(p.root().parent().unwrap()),
+ execs().with_status(101).with_stderr(
+ "[ERROR] the manifest-path must be \
+ a path to a Cargo.toml file",
+ ),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("read-manifest")
- .cwd(p.root()),
- execs().with_status(0)
- .with_json(MANIFEST_OUTPUT));
+ assert_that(
+ p.cargo("read-manifest").cwd(p.root()),
+ execs().with_status(0).with_json(MANIFEST_OUTPUT),
+ );
}
use cargotest::support::git;
use cargotest::support::paths::{self, CargoPathExt};
use cargotest::support::registry::{self, Package};
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
use url::Url;
-fn registry_path() -> PathBuf { paths::root().join("registry") }
-fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
+fn registry_path() -> PathBuf {
+ paths::root().join("registry")
+}
+fn registry() -> Url {
+ Url::from_file_path(&*registry_path()).ok().unwrap()
+}
#[test]
fn simple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = ">= 0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- reg = registry::registry())));
+ dir = p.url(),
+ reg = registry::registry()
+ )),
+ );
assert_that(p.cargo("clean"), execs().with_status(0));
// Don't download a second time
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = ">= 0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").publish();
Package::new("bar", "0.0.1").dep("baz", "*").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- reg = registry::registry())));
+ dir = p.url(),
+ reg = registry::registry()
+ )),
+ );
}
#[test]
Package::new("init", "0.0.1").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
nonexistent = ">= 0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
error: no matching package named `nonexistent` found
location searched: registry [..]
required by package `foo v0.0.1 ([..])`
-"));
+",
+ ),
+ );
}
#[test]
fn wrong_version() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
foo = ">= 1.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("foo", "0.0.1").publish();
Package::new("foo", "0.0.2").publish();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: no matching version `>= 1.0.0` found for package `foo`
location searched: registry [..]
versions found: 0.0.2, 0.0.1
required by package `foo v0.0.1 ([..])`
-"));
+",
+ ),
+ );
Package::new("foo", "0.0.3").publish();
Package::new("foo", "0.0.4").publish();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: no matching version `>= 1.0.0` found for package `foo`
location searched: registry [..]
versions found: 0.0.4, 0.0.3, 0.0.2, ...
required by package `foo v0.0.1 ([..])`
-"));
+",
+ ),
+ );
}
#[test]
fn bad_cksum() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bad-cksum = ">= 0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
pkg.publish();
t!(File::create(&pkg.archive_dst()));
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] registry [..]
[DOWNLOADING] bad-cksum [..]
[ERROR] unable to get packages from source
Caused by:
failed to verify the checksum of `bad-cksum v0.0.1 (registry `file://[..]`)`
-"));
+",
+ ),
+ );
}
#[test]
Package::new("init", "0.0.1").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
notyet = ">= 0.0.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: no matching package named `notyet` found
location searched: registry `[..]`
required by package `foo v0.0.1 ([..])`
-"));
+",
+ ),
+ );
Package::new("notyet", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `{reg}`
[DOWNLOADING] notyet v0.0.1 (registry `file://[..]`)
[COMPILING] notyet v0.0.1
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url(),
- reg = registry::registry())));
+ dir = p.url(),
+ reg = registry::registry()
+ )),
+ );
}
#[test]
Package::new("init", "0.0.1").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.notyet]
version = "0.0.1"
path = "notyet"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("notyet/Cargo.toml", r#"
+ .file(
+ "notyet/Cargo.toml",
+ r#"
[package]
name = "notyet"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("notyet/src/lib.rs", "")
.build();
- assert_that(p.cargo("package").arg("-v"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("package").arg("-v"),
+ execs().with_status(101).with_stderr_contains(
+ "\
[ERROR] failed to verify package tarball
Caused by:
no matching package named `notyet` found
location searched: registry [..]
required by package `foo v0.0.1 ([..])`
-"));
+",
+ ),
+ );
Package::new("notyet", "0.0.1").publish();
- assert_that(p.cargo("package"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("package"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[PACKAGING] foo v0.0.1 ({dir})
[VERIFYING] foo v0.0.1 ({dir})
[UPDATING] registry `[..]`
[COMPILING] notyet v0.0.1
[COMPILING] foo v0.0.1 ({dir}[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
}
#[test]
fn lockfile_locks() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
p.root().move_into_the_past();
Package::new("bar", "0.0.2").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn lockfile_locks_transitively() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").publish();
Package::new("bar", "0.0.1").dep("baz", "*").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
p.root().move_into_the_past();
Package::new("baz", "0.0.2").publish();
Package::new("bar", "0.0.2").dep("baz", "*").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn yanks_are_not_used() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").publish();
Package::new("baz", "0.0.2").yanked(true).publish();
Package::new("bar", "0.0.1").dep("baz", "*").publish();
- Package::new("bar", "0.0.2").dep("baz", "*").yanked(true).publish();
+ Package::new("bar", "0.0.2")
+ .dep("baz", "*")
+ .yanked(true)
+ .publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn relying_on_a_yank_is_bad() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.2").yanked(true).publish();
Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish();
- assert_that(p.cargo("build"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: no matching version `= 0.0.2` found for package `baz`
location searched: registry `[..]`
versions found: 0.0.1
required by package `bar v0.0.1`
-"));
+",
+ ),
+ );
}
#[test]
fn yanks_in_lockfiles_are_ok() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
registry::registry_path().join("3").rm_rf();
Package::new("bar", "0.0.1").yanked(true).publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
- assert_that(p.cargo("update"),
- execs().with_status(101).with_stderr_contains("\
+ assert_that(
+ p.cargo("update"),
+ execs().with_status(101).with_stderr_contains(
+ "\
error: no matching package named `bar` found
location searched: registry [..]
required by package `foo v0.0.1 ([..])`
-"));
+",
+ ),
+ );
}
#[test]
fn update_with_lockfile_if_packages_missing() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
p.root().move_into_the_past();
paths::home().join(".cargo/registry").rm_rf();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
-"));
+",
+ ),
+ );
}
#[test]
fn update_lockfile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
println!("0.0.1");
Package::new("bar", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
Package::new("bar", "0.0.2").publish();
Package::new("bar", "0.0.3").publish();
paths::home().join(".cargo/registry").rm_rf();
println!("0.0.2 update");
- assert_that(p.cargo("update")
- .arg("-p").arg("bar").arg("--precise").arg("0.0.2"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("update")
+ .arg("-p")
+ .arg("bar")
+ .arg("--precise")
+ .arg("0.0.2"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] bar v0.0.1 -> v0.0.2
-"));
+",
+ ),
+ );
println!("0.0.2 build");
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[DOWNLOADING] [..] v0.0.2 (registry `file://[..]`)
[COMPILING] bar v0.0.2
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
println!("0.0.3 update");
- assert_that(p.cargo("update")
- .arg("-p").arg("bar"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("update").arg("-p").arg("bar"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] bar v0.0.2 -> v0.0.3
-"));
+",
+ ),
+ );
println!("0.0.3 build");
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[DOWNLOADING] [..] v0.0.3 (registry `file://[..]`)
[COMPILING] bar v0.0.3
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
-
- println!("new dependencies update");
- Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish();
- Package::new("spam", "0.2.5").publish();
- assert_that(p.cargo("update")
- .arg("-p").arg("bar"),
- execs().with_status(0).with_stderr("\
+ dir = p.url()
+ )),
+ );
+
+ println!("new dependencies update");
+ Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish();
+ Package::new("spam", "0.2.5").publish();
+ assert_that(
+ p.cargo("update").arg("-p").arg("bar"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] bar v0.0.3 -> v0.0.4
[ADDING] spam v0.2.5
-"));
-
- println!("new dependencies update");
- Package::new("bar", "0.0.5").publish();
- assert_that(p.cargo("update")
- .arg("-p").arg("bar"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ println!("new dependencies update");
+ Package::new("bar", "0.0.5").publish();
+ assert_that(
+ p.cargo("update").arg("-p").arg("bar"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] bar v0.0.4 -> v0.0.5
[REMOVING] spam v0.2.5
-"));
+",
+ ),
+ );
}
#[test]
-fn update_offline(){
+fn update_offline() {
use cargotest::ChannelChanger;
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("update").masquerade_as_nightly_cargo().arg("-Zoffline"),
- execs().with_status(101).
- with_stderr("error: you can't update in the offline mode[..]"));
+ assert_that(
+ p.cargo("update")
+ .masquerade_as_nightly_cargo()
+ .arg("-Zoffline"),
+ execs()
+ .with_status(101)
+ .with_stderr("error: you can't update in the offline mode[..]"),
+ );
}
#[test]
fn dev_dependency_not_used() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("baz", "0.0.1").publish();
Package::new("bar", "0.0.1").dev_dep("baz", "*").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn login_with_no_cargo_dir() {
let home = paths::home().join("new-home");
t!(fs::create_dir(&home));
- assert_that(cargo_process().arg("login").arg("foo").arg("-v"),
- execs().with_status(0));
+ assert_that(
+ cargo_process().arg("login").arg("foo").arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
// Verify that the configuration file gets properly trunchated.
let home = paths::home().join("new-home");
t!(fs::create_dir(&home));
- assert_that(cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"),
- execs().with_status(0));
- assert_that(cargo_process().arg("login").arg("lmao").arg("-v"),
- execs().with_status(0));
- assert_that(cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"),
- execs().with_status(0));
+ assert_that(
+ cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process().arg("login").arg("lmao").arg("-v"),
+ execs().with_status(0),
+ );
+ assert_that(
+ cargo_process().arg("login").arg("lmaolmaolmao").arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn bad_license_file() {
Package::new("foo", "1.0.0").publish();
let p = project("all")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
license-file = "foo"
description = "bar"
repository = "baz"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("publish")
- .arg("-v")
- .arg("--index").arg(registry().to_string()),
- execs().with_status(101)
- .with_stderr_contains("\
-[ERROR] the license file `foo` does not exist"));
+ assert_that(
+ p.cargo("publish")
+ .arg("-v")
+ .arg("--index")
+ .arg(registry().to_string()),
+ execs().with_status(101).with_stderr_contains(
+ "\
+ [ERROR] the license file `foo` does not exist",
+ ),
+ );
}
#[test]
fn updating_a_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
Package::new("bar", "0.0.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
- t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all(br#"
+ t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all(
+ br#"
[project]
name = "a"
version = "0.0.1"
[dependencies]
bar = "0.1.0"
- "#));
+ "#
+ ));
Package::new("bar", "0.1.0").publish();
println!("second");
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.1.0 (registry `file://[..]`)
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn git_and_registry_dep() {
let b = git::repo(&paths::root().join("b"))
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.0.1"
[dependencies]
a = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.b]
git = '{}'
- "#, b.url()))
+ "#,
+ b.url()
+ ),
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("a", "0.0.1").publish();
p.root().move_into_the_past();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] [..]
[UPDATING] [..]
[DOWNLOADING] a v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
p.root().move_into_the_past();
println!("second");
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
// First generate a Cargo.lock and a clone of the registry index at the
// "head" of the current registry.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("a", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
// Next, publish a new package and back up the copy of the registry we just
// created.
// Generate a Cargo.lock with the newer version, and then move the old copy
// of the registry back into place.
let p2 = project("foo2")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = "0.1.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p2.cargo("build"),
- execs().with_status(0));
+ assert_that(p2.cargo("build"), execs().with_status(0));
registry.rm_rf();
t!(fs::rename(&backup, ®istry));
- t!(fs::rename(p2.root().join("Cargo.lock"), p.root().join("Cargo.lock")));
+ t!(fs::rename(
+ p2.root().join("Cargo.lock"),
+ p.root().join("Cargo.lock")
+ ));
// Finally, build the first project again (with our newer Cargo.lock) which
// should force an update of the old registry, download the new crate, and
// then build everything again.
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[UPDATING] [..]
[DOWNLOADING] a v0.1.1 (registry `file://[..]`)
[COMPILING] a v0.1.1
[COMPILING] foo v0.5.0 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
",
- dir = p.url())));
-
+ dir = p.url()
+ )),
+ );
}
#[test]
fn fetch_downloads() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("a", "0.1.0").publish();
- assert_that(p.cargo("fetch"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("fetch"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] a v0.1.0 (registry [..])
-"));
+",
+ ),
+ );
}
#[test]
fn update_transitive_dependency() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
a = "0.1.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("a", "0.1.0").dep("b", "*").publish();
Package::new("b", "0.1.0").publish();
- assert_that(p.cargo("fetch"),
- execs().with_status(0));
+ assert_that(p.cargo("fetch"), execs().with_status(0));
Package::new("b", "0.1.1").publish();
- assert_that(p.cargo("update").arg("-pb"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("update").arg("-pb"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] b v0.1.0 -> v0.1.1
-"));
+",
+ ),
+ );
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[DOWNLOADING] b v0.1.1 (registry `file://[..]`)
[COMPILING] b v0.1.1
[COMPILING] a v0.1.0
[COMPILING] foo v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
-"));
+",
+ ),
+ );
}
#[test]
fn update_backtracking_ok() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
webdriver = "0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- Package::new("webdriver", "0.1.0").dep("hyper", "0.6").publish();
- Package::new("hyper", "0.6.5").dep("openssl", "0.1")
- .dep("cookie", "0.1")
- .publish();
- Package::new("cookie", "0.1.0").dep("openssl", "0.1").publish();
+ Package::new("webdriver", "0.1.0")
+ .dep("hyper", "0.6")
+ .publish();
+ Package::new("hyper", "0.6.5")
+ .dep("openssl", "0.1")
+ .dep("cookie", "0.1")
+ .publish();
+ Package::new("cookie", "0.1.0")
+ .dep("openssl", "0.1")
+ .publish();
Package::new("openssl", "0.1.0").publish();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0));
+ assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
Package::new("openssl", "0.1.1").publish();
- Package::new("hyper", "0.6.6").dep("openssl", "0.1.1")
- .dep("cookie", "0.1.0")
- .publish();
+ Package::new("hyper", "0.6.6")
+ .dep("openssl", "0.1.1")
+ .dep("cookie", "0.1.0")
+ .publish();
- assert_that(p.cargo("update").arg("-p").arg("hyper"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("update").arg("-p").arg("hyper"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn update_multiple_packages() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
a = "*"
b = "*"
c = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("b", "0.1.0").publish();
Package::new("c", "0.1.0").publish();
- assert_that(p.cargo("fetch"),
- execs().with_status(0));
+ assert_that(p.cargo("fetch"), execs().with_status(0));
Package::new("a", "0.1.1").publish();
Package::new("b", "0.1.1").publish();
Package::new("c", "0.1.1").publish();
- assert_that(p.cargo("update").arg("-pa").arg("-pb"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("update").arg("-pa").arg("-pb"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] a v0.1.0 -> v0.1.1
[UPDATING] b v0.1.0 -> v0.1.1
-"));
+",
+ ),
+ );
- assert_that(p.cargo("update").arg("-pb").arg("-pc"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("update").arg("-pb").arg("-pc"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[UPDATING] c v0.1.0 -> v0.1.1
-"));
-
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr_contains("\
-[DOWNLOADING] a v0.1.1 (registry `file://[..]`)")
- .with_stderr_contains("\
-[DOWNLOADING] b v0.1.1 (registry `file://[..]`)")
- .with_stderr_contains("\
-[DOWNLOADING] c v0.1.1 (registry `file://[..]`)")
- .with_stderr_contains("\
-[COMPILING] a v0.1.1")
- .with_stderr_contains("\
-[COMPILING] b v0.1.1")
- .with_stderr_contains("\
-[COMPILING] c v0.1.1")
- .with_stderr_contains("\
-[COMPILING] foo v0.5.0 ([..])"));
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build"),
+ execs()
+ .with_status(0)
+ .with_stderr_contains(
+ "\
+ [DOWNLOADING] a v0.1.1 (registry `file://[..]`)",
+ )
+ .with_stderr_contains(
+ "\
+ [DOWNLOADING] b v0.1.1 (registry `file://[..]`)",
+ )
+ .with_stderr_contains(
+ "\
+ [DOWNLOADING] c v0.1.1 (registry `file://[..]`)",
+ )
+ .with_stderr_contains(
+ "\
+ [COMPILING] a v0.1.1",
+ )
+ .with_stderr_contains(
+ "\
+ [COMPILING] b v0.1.1",
+ )
+ .with_stderr_contains(
+ "\
+ [COMPILING] c v0.1.1",
+ )
+ .with_stderr_contains(
+ "\
+ [COMPILING] foo v0.5.0 ([..])",
+ ),
+ );
}
#[test]
fn bundled_crate_in_registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
[dependencies]
bar = "0.1"
baz = "0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.1.0").publish();
Package::new("baz", "0.1.0")
.dep("bar", "0.1.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.1.0"
[dependencies]
bar = { path = "bar", version = "0.1.0" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.publish();
#[test]
fn update_same_prefix_oh_my_how_was_this_a_bug() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "ugh"
version = "0.5.0"
[dependencies]
foo = "0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
.publish();
assert_that(p.cargo("generate-lockfile"), execs().with_status(0));
- assert_that(p.cargo("update").arg("-pfoobar").arg("--precise=0.2.0"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("update").arg("-pfoobar").arg("--precise=0.2.0"),
+ execs().with_status(0),
+ );
}
#[test]
fn use_semver() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
foo = "1.2.3-alpha.0"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
#[test]
fn only_download_relevant() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
bar = "*"
[dependencies]
baz = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("bar", "0.1.0").publish();
Package::new("baz", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] baz v0.1.0 ([..])
[COMPILING] baz v0.1.0
[COMPILING] bar v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] secs
-"));
+",
+ ),
+ );
}
#[test]
fn resolve_and_backtracking() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("foo", "0.1.1")
- .feature_dep("bar", "0.1", &["a", "b"])
- .publish();
+ .feature_dep("bar", "0.1", &["a", "b"])
+ .publish();
Package::new("foo", "0.1.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn upstream_warnings_on_extra_verbose() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("foo", "0.1.0")
- .file("src/lib.rs", "fn unused() {}")
- .publish();
+ .file("src/lib.rs", "fn unused() {}")
+ .publish();
- assert_that(p.cargo("build").arg("-vv"),
- execs().with_status(0).with_stderr_contains("\
+ assert_that(
+ p.cargo("build").arg("-vv"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[..]warning: function is never used[..]
-"));
+",
+ ),
+ );
}
#[test]
fn disallow_network() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--frozen"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--frozen"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to load source for a dependency on `foo`
Caused by:
Caused by:
attempting to make an HTTP request, but --frozen was specified
-"));
+",
+ ),
+ );
}
#[test]
fn add_dep_dont_update_registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
baz = { path = "baz" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.5.0"
[dependencies]
remote = "0.3"
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
+ t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+ br#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
baz = { path = "baz" }
remote = "0.3"
- "#));
+ "#
+ ));
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] bar v0.5.0 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bump_version_dont_update_registry() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
baz = { path = "baz" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.5.0"
[dependencies]
remote = "0.3"
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
+ t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+ br#"
[project]
name = "bar"
version = "0.6.0"
[dependencies]
baz = { path = "baz" }
- "#));
+ "#
+ ));
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] bar v0.6.0 ([..])
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn old_version_req() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
remote = "0.2*"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("remote", "0.2.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
warning: parsed version requirement `0.2*` is no longer valid
Previous versions of Cargo accepted this malformed requirement,
[COMPILING] [..]
[COMPILING] [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn old_version_req_upstream() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
remote = "0.3"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
Package::new("remote", "0.3.0")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "remote"
version = "0.3.0"
[dependencies]
bar = "0.2*"
- "#)
- .file("src/lib.rs", "")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .publish();
Package::new("bar", "0.2.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] [..]
[DOWNLOADING] [..]
warning: parsed version requirement `0.2*` is no longer valid
[COMPILING] [..]
[COMPILING] [..]
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
#[test]
fn toml_lies_but_index_is_truth() {
Package::new("foo", "0.2.0").publish();
Package::new("bar", "0.3.0")
- .dep("foo", "0.2.0")
- .file("Cargo.toml", r#"
+ .dep("foo", "0.2.0")
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.3.0"
[dependencies]
foo = "0.1.0"
- "#)
- .file("src/lib.rs", "extern crate foo;")
- .publish();
+ "#,
+ )
+ .file("src/lib.rs", "extern crate foo;")
+ .publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dependencies]
bar = "0.3"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn vv_prints_warnings() {
Package::new("foo", "0.2.0")
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
#![deny(warnings)]
fn foo() {} // unused function
- "#)
- .publish();
+ "#,
+ )
+ .publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "fo"
version = "0.5.0"
[dependencies]
foo = "0.2"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-vv"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-vv"), execs().with_status(0));
}
#[test]
fn bad_and_or_malicious_packages_rejected() {
Package::new("foo", "0.2.0")
- .extra_file("foo-0.1.0/src/lib.rs", "")
- .publish();
+ .extra_file("foo-0.1.0/src/lib.rs", "")
+ .publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "fo"
version = "0.5.0"
[dependencies]
foo = "0.2"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-vv"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("-vv"),
+ execs().with_status(101).with_stderr(
+ "\
[UPDATING] [..]
[DOWNLOADING] [..]
error: unable to get packages from source
Caused by:
[..] contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\"
-"));
+",
+ ),
+ );
}
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use cargotest::support::registry::Package;
use cargotest::ChannelChanger;
use hamcrest::assert_that;
#[test]
fn gated() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = { package = "foo", version = "0.1" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
feature `rename-dependency` is required
consider adding `cargo-features = [\"rename-dependency\"]` to the manifest
-"));
+",
+ ),
+ );
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = { version = "0.1", package = "baz" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to parse manifest at `[..]`
Caused by:
feature `rename-dependency` is required
consider adding `cargo-features = [\"rename-dependency\"]` to the manifest
-"));
+",
+ ),
+ );
}
#[test]
Package::new("bar", "0.2.0").publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["rename-dependency"]
[project]
[dependencies]
bar = { version = "0.1.0" }
baz = { version = "0.2.0", package = "bar" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate bar;
extern crate baz;
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0),
+ );
}
#[test]
fn rename_with_different_names() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
cargo-features = ["rename-dependency"]
[project]
[dependencies]
baz = { path = "bar", package = "bar" }
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
extern crate baz;
- ")
- .file("bar/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[lib]
name = "random_name"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build").masquerade_as_nightly_cargo(),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(0),
+ );
}
use cargotest::is_nightly;
use cargotest::install::{cargo_home, has_installed_exe};
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::{assert_that, existing_file, is_not};
#[test]
fn build_bin_default_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
required-features = ["a"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate foo;
#[cfg(feature = "a")]
}
fn main() {}
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(feature = "a")]
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(p.cargo("build").arg("--no-default-features"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--no-default-features"),
+ execs().with_status(0),
+ );
- assert_that(p.cargo("build").arg("--bin=foo"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--bin=foo"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(p.cargo("build").arg("--bin=foo").arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("--bin=foo")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-"));
+",
+ ),
+ );
}
#[test]
fn build_bin_arg_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--features").arg("a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--features").arg("a"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn build_bin_multiple_required_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
name = "foo_2"
path = "src/foo_2.rs"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/foo_1.rs", "fn main() {}")
.file("src/foo_2.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo_1"), is_not(existing_file()));
assert_that(&p.bin("foo_2"), existing_file());
- assert_that(p.cargo("build").arg("--features").arg("c"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--features").arg("c"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo_1"), existing_file());
assert_that(&p.bin("foo_2"), existing_file());
- assert_that(p.cargo("build").arg("--no-default-features"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--no-default-features"),
+ execs().with_status(0),
+ );
}
#[test]
fn build_example_default_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[example]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("examples/foo.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--example=foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--example=foo"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("examples/foo"), existing_file());
- assert_that(p.cargo("build").arg("--example=foo").arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-"));
+",
+ ),
+ );
}
#[test]
fn build_example_arg_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[example]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("examples/foo.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--example=foo").arg("--features").arg("a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo")
+ .arg("--features")
+ .arg("a"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("examples/foo"), existing_file());
}
#[test]
fn build_example_multiple_required_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[example]]
name = "foo_2"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("examples/foo_1.rs", "fn main() {}")
.file("examples/foo_2.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("--example=foo_1"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--example=foo_1"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo_1` requires the features: `b`, `c`
Consider enabling them by passing e.g. `--features=\"b c\"`
-"));
- assert_that(p.cargo("build").arg("--example=foo_2"),
- execs().with_status(0));
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").arg("--example=foo_2"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("examples/foo_1"), is_not(existing_file()));
assert_that(&p.bin("examples/foo_2"), existing_file());
- assert_that(p.cargo("build").arg("--example=foo_1")
- .arg("--features").arg("c"),
- execs().with_status(0));
- assert_that(p.cargo("build").arg("--example=foo_2")
- .arg("--features").arg("c"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo_1")
+ .arg("--features")
+ .arg("c"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo_2")
+ .arg("--features")
+ .arg("c"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("examples/foo_1"), existing_file());
assert_that(&p.bin("examples/foo_2"), existing_file());
- assert_that(p.cargo("build").arg("--example=foo_1")
- .arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo_1")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo_1` requires the features: `b`, `c`
Consider enabling them by passing e.g. `--features=\"b c\"`
-"));
- assert_that(p.cargo("build").arg("--example=foo_2")
- .arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo_2")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo_2` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-"));
+",
+ ),
+ );
}
#[test]
fn test_default_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[test]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("tests/foo.rs", "#[test]\nfn test() {}")
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test test ... ok"));
-
- assert_that(p.cargo("test").arg("--no-default-features"),
- execs().with_status(0).with_stderr(format!("\
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"))
- .with_stdout(""));
-
- assert_that(p.cargo("test").arg("--test=foo"),
- execs().with_status(0).with_stderr(format!("\
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--no-default-features"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
+ ))
+ .with_stdout(""),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--test=foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]"))
- .with_stdout_contains("test test ... ok"));
-
- assert_that(p.cargo("test").arg("--test=foo").arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]"
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
+
+ assert_that(
+ p.cargo("test")
+ .arg("--test=foo")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-"));
+",
+ ),
+ );
}
#[test]
fn test_arg_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[test]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("tests/foo.rs", "#[test]\nfn test() {}")
.build();
- assert_that(p.cargo("test").arg("--features").arg("a"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("--features").arg("a"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test test ... ok"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
}
#[test]
fn test_multiple_required_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[test]]
name = "foo_2"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("tests/foo_1.rs", "#[test]\nfn test() {}")
.file("tests/foo_2.rs", "#[test]\nfn test() {}")
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]", p.url()))
- .with_stdout_contains("test test ... ok"));
-
- assert_that(p.cargo("test").arg("--features").arg("c"),
- execs().with_status(0).with_stderr(format!("\
+[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--features").arg("c"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo_1-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]", p.url()))
- .with_stdout_contains_n("test test ... ok", 2));
-
- assert_that(p.cargo("test").arg("--no-default-features"),
- execs().with_status(0).with_stderr(format!("\
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"))
- .with_stdout(""));
+[RUNNING] target[/]debug[/]deps[/]foo_2-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains_n("test test ... ok", 2),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--no-default-features"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
+ ))
+ .with_stdout(""),
+ );
}
#[test]
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bench]]
name = "foo"
required-features = ["a"]
- "#)
- .file("benches/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench(_: &mut test::Bencher) {
- }"#)
+ }"#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test bench ... bench: [..]"));
-
- assert_that(p.cargo("bench").arg("--no-default-features"),
- execs().with_status(0).with_stderr(format!("\
-[FINISHED] release [optimized] target(s) in [..]"))
- .with_stdout(""));
-
- assert_that(p.cargo("bench").arg("--bench=foo"),
- execs().with_status(0).with_stderr(format!("\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("--no-default-features"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
+ [FINISHED] release [optimized] target(s) in [..]"
+ ))
+ .with_stdout(""),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("--bench=foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]"))
- .with_stdout_contains("test bench ... bench: [..]"));
-
- assert_that(p.cargo("bench").arg("--bench=foo").arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]"
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
+
+ assert_that(
+ p.cargo("bench")
+ .arg("--bench=foo")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-"));
+",
+ ),
+ );
}
#[test]
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bench]]
name = "foo"
required-features = ["a"]
- "#)
- .file("benches/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench(_: &mut test::Bencher) {
- }"#)
+ }"#,
+ )
.build();
- assert_that(p.cargo("bench").arg("--features").arg("a"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("bench").arg("--features").arg("a"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
}
#[test]
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bench]]
name = "foo_2"
required-features = ["a"]
- "#)
- .file("benches/foo_1.rs", r#"
+ "#,
+ )
+ .file(
+ "benches/foo_1.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench(_: &mut test::Bencher) {
- }"#)
- .file("benches/foo_2.rs", r#"
+ }"#,
+ )
+ .file(
+ "benches/foo_2.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench(_: &mut test::Bencher) {
- }"#)
+ }"#,
+ )
.build();
- assert_that(p.cargo("bench"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]", p.url()))
- .with_stdout_contains("test bench ... bench: [..]"));
-
- assert_that(p.cargo("bench").arg("--features").arg("c"),
- execs().with_status(0).with_stderr(format!("\
+[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("--features").arg("c"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo_1-[..][EXE]
-[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]", p.url()))
- .with_stdout_contains_n("test bench ... bench: [..]", 2));
-
- assert_that(p.cargo("bench").arg("--no-default-features"),
- execs().with_status(0).with_stderr(format!("\
-[FINISHED] release [optimized] target(s) in [..]"))
- .with_stdout(""));
+[RUNNING] target[/]release[/]deps[/]foo_2-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains_n("test bench ... bench: [..]", 2),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("--no-default-features"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
+ [FINISHED] release [optimized] target(s) in [..]"
+ ))
+ .with_stdout(""),
+ );
}
#[test]
fn install_default_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[example]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.build();
- assert_that(p.cargo("install"),
- execs().with_status(0));
+ assert_that(p.cargo("install"), execs().with_status(0));
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
- assert_that(p.cargo("install").arg("--no-default-features"),
- execs().with_status(101).with_stderr(format!("\
+ assert_that(
+ p.cargo("install").arg("--no-default-features"),
+ execs().with_status(101).with_stderr(format!(
+ "\
[INSTALLING] foo v0.0.1 ([..])
[FINISHED] release [optimized] target(s) in [..]
[ERROR] no binaries are available for install using the selected features
-")));
+"
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
- assert_that(p.cargo("install").arg("--bin=foo"),
- execs().with_status(0));
+ assert_that(p.cargo("install").arg("--bin=foo"), execs().with_status(0));
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
-
- assert_that(p.cargo("install").arg("--bin=foo").arg("--no-default-features"),
- execs().with_status(101).with_stderr(format!("\
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
+
+ assert_that(
+ p.cargo("install")
+ .arg("--bin=foo")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(format!(
+ "\
[INSTALLING] foo v0.0.1 ([..])
[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
`[..]target`
Caused by:
target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-")));
+"
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
- assert_that(p.cargo("install").arg("--example=foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("install").arg("--example=foo"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
-
- assert_that(p.cargo("install").arg("--example=foo").arg("--no-default-features"),
- execs().with_status(101).with_stderr(format!("\
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
+
+ assert_that(
+ p.cargo("install")
+ .arg("--example=foo")
+ .arg("--no-default-features"),
+ execs().with_status(101).with_stderr(format!(
+ "\
[INSTALLING] foo v0.0.1 ([..])
[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
`[..]target`
Caused by:
target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-")));
+"
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
}
#[test]
fn install_arg_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("install").arg("--features").arg("a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("install").arg("--features").arg("a"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
}
#[test]
fn install_multiple_required_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
name = "foo_2"
path = "src/foo_2.rs"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/foo_1.rs", "fn main() {}")
.file("src/foo_2.rs", "fn main() {}")
.build();
- assert_that(p.cargo("install"),
- execs().with_status(0));
+ assert_that(p.cargo("install"), execs().with_status(0));
assert_that(cargo_home(), is_not(has_installed_exe("foo_1")));
assert_that(cargo_home(), has_installed_exe("foo_2"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
- assert_that(p.cargo("install").arg("--features").arg("c"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("install").arg("--features").arg("c"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo_1"));
assert_that(cargo_home(), has_installed_exe("foo_2"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
- assert_that(p.cargo("install").arg("--no-default-features"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("install").arg("--no-default-features"),
+ execs().with_status(101).with_stderr(
+ "\
[INSTALLING] foo v0.0.1 ([..])
[FINISHED] release [optimized] target(s) in [..]
[ERROR] no binaries are available for install using the selected features
-"));
+",
+ ),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo_1")));
assert_that(cargo_home(), is_not(has_installed_exe("foo_2")));
}
#[test]
fn dep_feature_in_toml() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bench]]
name = "foo"
required-features = ["bar/a"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.file("tests/foo.rs", "#[test]\nfn test() {}")
- .file("benches/foo.rs", r#"
+ .file(
+ "benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench(_: &mut test::Bencher) {
- }"#)
- .file("bar/Cargo.toml", r#"
+ }"#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[features]
a = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
// bin
- assert_that(p.cargo("build").arg("--bin=foo"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--bin=foo"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
// example
- assert_that(p.cargo("build").arg("--example=foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--example=foo"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("examples/foo"), existing_file());
// test
- assert_that(p.cargo("test").arg("--test=foo"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("--test=foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test test ... ok"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
// bench
if is_nightly() {
- assert_that(p.cargo("bench").arg("--bench=foo"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("bench").arg("--bench=foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] bar v0.0.1 ({0}/bar)
[COMPILING] foo v0.0.1 ({0})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
}
// install
- assert_that(p.cargo("install"),
- execs().with_status(0));
+ assert_that(p.cargo("install"), execs().with_status(0));
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
}
#[test]
fn dep_feature_in_cmd_line() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bench]]
name = "foo"
required-features = ["bar/a"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.file("tests/foo.rs", "#[test]\nfn test() {}")
- .file("benches/foo.rs", r#"
+ .file(
+ "benches/foo.rs",
+ r#"
#![feature(test)]
extern crate test;
#[bench]
fn bench(_: &mut test::Bencher) {
- }"#)
- .file("bar/Cargo.toml", r#"
+ }"#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[features]
a = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
// bin
- assert_that(p.cargo("build").arg("--bin=foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--bin=foo"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `bar/a`
Consider enabling them by passing e.g. `--features=\"bar/a\"`
-"));
-
- assert_that(p.cargo("build").arg("--bin=foo").arg("--features").arg("bar/a"),
- execs().with_status(0));
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .arg("--bin=foo")
+ .arg("--features")
+ .arg("bar/a"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
// example
- assert_that(p.cargo("build").arg("--example=foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--example=foo"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `bar/a`
Consider enabling them by passing e.g. `--features=\"bar/a\"`
-"));
-
- assert_that(p.cargo("build").arg("--example=foo").arg("--features").arg("bar/a"),
- execs().with_status(0));
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .arg("--example=foo")
+ .arg("--features")
+ .arg("bar/a"),
+ execs().with_status(0),
+ );
assert_that(&p.bin("examples/foo"), existing_file());
// test
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"))
- .with_stdout(""));
-
- assert_that(p.cargo("test").arg("--test=foo").arg("--features").arg("bar/a"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
+ ))
+ .with_stdout(""),
+ );
+
+ assert_that(
+ p.cargo("test")
+ .arg("--test=foo")
+ .arg("--features")
+ .arg("bar/a"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test test ... ok"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
// bench
if is_nightly() {
- assert_that(p.cargo("bench"),
- execs().with_status(0).with_stderr(format!("\
-[FINISHED] release [optimized] target(s) in [..]"))
- .with_stdout(""));
-
- assert_that(p.cargo("bench").arg("--bench=foo").arg("--features").arg("bar/a"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
+ [FINISHED] release [optimized] target(s) in [..]"
+ ))
+ .with_stdout(""),
+ );
+
+ assert_that(
+ p.cargo("bench")
+ .arg("--bench=foo")
+ .arg("--features")
+ .arg("bar/a"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] bar v0.0.1 ({0}/bar)
[COMPILING] foo v0.0.1 ({0})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test bench ... bench: [..]"));
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test bench ... bench: [..]"),
+ );
}
// install
- assert_that(p.cargo("install"),
- execs().with_status(101).with_stderr(format!("\
+ assert_that(
+ p.cargo("install"),
+ execs().with_status(101).with_stderr(format!(
+ "\
[INSTALLING] foo v0.0.1 ([..])
[FINISHED] release [optimized] target(s) in [..]
[ERROR] no binaries are available for install using the selected features
-")));
+"
+ )),
+ );
assert_that(cargo_home(), is_not(has_installed_exe("foo")));
- assert_that(p.cargo("install").arg("--features").arg("bar/a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("install").arg("--features").arg("bar/a"),
+ execs().with_status(0),
+ );
assert_that(cargo_home(), has_installed_exe("foo"));
- assert_that(p.cargo("uninstall").arg("foo"),
- execs().with_status(0));
+ assert_that(p.cargo("uninstall").arg("foo"), execs().with_status(0));
}
#[test]
fn test_skips_compiling_bin_with_missing_required_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
name = "bin_foo"
path = "src/bin/foo.rs"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/bin/foo.rs", "extern crate bar; fn main() {}")
.file("tests/foo.rs", "")
.file("benches/foo.rs", "")
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("running 0 tests"));
-
- assert_that(p.cargo("test").arg("--features").arg("a").arg("-j").arg("1"),
- execs().with_status(101).with_stderr_contains(format!("\
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("running 0 tests"),
+ );
+
+ assert_that(
+ p.cargo("test")
+ .arg("--features")
+ .arg("a")
+ .arg("-j")
+ .arg("1"),
+ execs().with_status(101).with_stderr_contains(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
-error[E0463]: can't find crate for `bar`", p.url())));
+error[E0463]: can't find crate for `bar`",
+ p.url()
+ )),
+ );
if is_nightly() {
- assert_that(p.cargo("bench"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("bench"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
-[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("running 0 tests"));
-
- assert_that(p.cargo("bench").arg("--features").arg("a").arg("-j").arg("1"),
- execs().with_status(101).with_stderr_contains(format!("\
+[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("running 0 tests"),
+ );
+
+ assert_that(
+ p.cargo("bench")
+ .arg("--features")
+ .arg("a")
+ .arg("-j")
+ .arg("1"),
+ execs().with_status(101).with_stderr_contains(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
-error[E0463]: can't find crate for `bar`", p.url())));
+error[E0463]: can't find crate for `bar`",
+ p.url()
+ )),
+ );
}
}
#[test]
fn run_default() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
required-features = ["a"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo; fn main() {}")
.build();
- assert_that(p.cargo("run"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("run"),
+ execs().with_status(101).with_stderr(
+ "\
error: target `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
-"));
-
- assert_that(p.cargo("run").arg("--features").arg("a"),
- execs().with_status(0));
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("run").arg("--features").arg("a"),
+ execs().with_status(0),
+ );
}
#[test]
fn run_default_multiple_required_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
name = "foo2"
path = "src/foo2.rs"
required-features = ["b"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/foo1.rs", "extern crate foo; fn main() {}")
.file("src/foo2.rs", "extern crate foo; fn main() {}")
.build();
- assert_that(p.cargo("run"),
- execs().with_status(101).with_stderr("\
-error: `cargo run` requires that a project only have one executable; \
-use the `--bin` option to specify which one to run\navailable binaries: foo1, foo2"));
+ assert_that(
+ p.cargo("run"),
+ execs().with_status(101).with_stderr(
+ "\
+ error: `cargo run` requires that a project only have one executable; \
+ use the `--bin` option to specify which one to run\navailable binaries: foo1, foo2",
+ ),
+ );
}
use hamcrest::{assert_that, contains, is_not};
-use cargo::core::source::{SourceId, GitReference};
+use cargo::core::source::{GitReference, SourceId};
use cargo::core::dependency::Kind::{self, Development};
-use cargo::core::{Dependency, PackageId, Summary, Registry};
+use cargo::core::{Dependency, PackageId, Registry, Summary};
use cargo::util::{CargoResult, ToUrl};
use cargo::core::resolver::{self, Method};
-fn resolve(pkg: &PackageId, deps: Vec<Dependency>, registry: &[Summary])
- -> CargoResult<Vec<PackageId>>
-{
+fn resolve(
+ pkg: &PackageId,
+ deps: Vec<Dependency>,
+ registry: &[Summary],
+) -> CargoResult<Vec<PackageId>> {
struct MyRegistry<'a>(&'a [Summary]);
impl<'a> Registry for MyRegistry<'a> {
- fn query(&mut self,
- dep: &Dependency,
- f: &mut FnMut(Summary)) -> CargoResult<()> {
+ fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
for summary in self.0.iter() {
if dep.matches(summary) {
f(summary.clone());
}
Ok(())
}
- fn supports_checksums(&self) -> bool { false }
- fn requires_precise(&self) -> bool { false }
+ fn supports_checksums(&self) -> bool {
+ false
+ }
+ fn requires_precise(&self) -> bool {
+ false
+ }
}
let mut registry = MyRegistry(registry);
let summary = Summary::new(pkg.clone(), deps, BTreeMap::new(), None).unwrap();
}
fn pkg(name: &str) -> Summary {
- let link = if name.ends_with("-sys") {Some(name.to_string())} else {None};
+ let link = if name.ends_with("-sys") {
+ Some(name.to_string())
+ } else {
+ None
+ };
Summary::new(pkg_id(name), Vec::new(), BTreeMap::new(), link).unwrap()
}
}
fn pkg_loc(name: &str, loc: &str) -> Summary {
- let link = if name.ends_with("-sys") {Some(name.to_string())} else {None};
+ let link = if name.ends_with("-sys") {
+ Some(name.to_string())
+ } else {
+ None
+ };
Summary::new(pkg_id_loc(name, loc), Vec::new(), BTreeMap::new(), link).unwrap()
}
-fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") }
+fn dep(name: &str) -> Dependency {
+ dep_req(name, "1.0.0")
+}
fn dep_req(name: &str, req: &str) -> Dependency {
let url = "http://example.com".to_url().unwrap();
let source_id = SourceId::for_registry(&url).unwrap();
}
fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
- names.iter()
- .map(|&(name, loc)| pkg_id_loc(name, loc)).collect()
+ names
+ .iter()
+ .map(|&(name, loc)| pkg_id_loc(name, loc))
+ .collect()
}
#[test]
fn test_resolving_empty_dependency_list() {
- let res = resolve(&pkg_id("root"), Vec::new(),
- ®istry(vec![])).unwrap();
+ let res = resolve(&pkg_id("root"), Vec::new(), ®istry(vec![])).unwrap();
assert_eq!(res, names(&["root"]));
}
#[test]
fn test_resolving_multiple_deps() {
let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
- let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("baz")],
- ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("baz")], ®).unwrap();
assert_same(&res, &names(&["root", "foo", "baz"]));
}
#[test]
fn test_resolving_common_transitive_deps() {
let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
- let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("bar")],
- ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("bar")], ®).unwrap();
assert_that(&res, contains(names(&["root", "foo", "bar"])));
}
#[test]
fn test_resolving_with_same_name() {
- let list = vec![pkg_loc("foo", "http://first.example.com"),
- pkg_loc("bar", "http://second.example.com")];
+ let list = vec![
+ pkg_loc("foo", "http://first.example.com"),
+ pkg_loc("bar", "http://second.example.com"),
+ ];
let reg = registry(list);
- let res = resolve(&pkg_id("root"),
- vec![dep_loc("foo", "http://first.example.com"),
- dep_loc("bar", "http://second.example.com")],
- ®).unwrap();
-
- let mut names = loc_names(&[("foo", "http://first.example.com"),
- ("bar", "http://second.example.com")]);
+ let res = resolve(
+ &pkg_id("root"),
+ vec![
+ dep_loc("foo", "http://first.example.com"),
+ dep_loc("bar", "http://second.example.com"),
+ ],
+ ®,
+ ).unwrap();
+
+ let mut names = loc_names(&[
+ ("foo", "http://first.example.com"),
+ ("bar", "http://second.example.com"),
+ ]);
names.push(pkg_id("root"));
assert_same(&res, &names);
pkg!("foo" => ["bar", dep_kind("baz", Development)]),
pkg!("baz" => ["bat", dep_kind("bam", Development)]),
pkg!("bar"),
- pkg!("bat")
+ pkg!("bat"),
]);
- let res = resolve(&pkg_id("root"),
- vec![dep("foo"), dep_kind("baz", Development)],
- ®).unwrap();
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep("foo"), dep_kind("baz", Development)],
+ ®,
+ ).unwrap();
assert_that(&res, contains(names(&["root", "foo", "bar", "baz"])));
}
#[test]
fn resolving_with_many_versions() {
- let reg = registry(vec![
- pkg!(("foo", "1.0.1")),
- pkg!(("foo", "1.0.2")),
- ]);
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
let res = resolve(&pkg_id("root"), vec![dep("foo")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.2")])));
+ assert_that(
+ &res,
+ contains(names(&[("root", "1.0.0"), ("foo", "1.0.2")])),
+ );
}
#[test]
fn resolving_with_specific_version() {
- let reg = registry(vec![
- pkg!(("foo", "1.0.1")),
- pkg!(("foo", "1.0.2")),
- ]);
+ let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
- let res = resolve(&pkg_id("root"), vec![dep_req("foo", "=1.0.1")],
- ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "=1.0.1")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.1")])));
+ assert_that(
+ &res,
+ contains(names(&[("root", "1.0.0"), ("foo", "1.0.1")])),
+ );
}
#[test]
pkg!("bar" => [dep_req("util", ">=1.0.1")]),
]);
- let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
- ®).unwrap();
-
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.0"),
- ("bar", "1.0.0"),
- ("util", "1.2.2")])));
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
+ ®,
+ ).unwrap();
+
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("util", "1.2.2"),
+ ])),
+ );
assert_that(&res, is_not(contains(names(&[("util", "1.0.1")]))));
assert_that(&res, is_not(contains(names(&[("util", "1.1.1")]))));
}
pkg!("bar" => [dep_req("foo", "=1.0.2")]),
]);
- assert!(resolve(&pkg_id("root"), vec![
- dep_req("foo", "=1.0.1"),
- dep("bar"),
- ], ®).is_err());
+ assert!(
+ resolve(
+ &pkg_id("root"),
+ vec![dep_req("foo", "=1.0.1"), dep("bar")],
+ ®
+ ).is_err()
+ );
}
#[test]
pkg!("baz"),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("foo", "^1"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.1"),
- ("baz", "1.0.0")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.1"),
+ ("baz", "1.0.0"),
+ ])),
+ );
}
#[test]
pkg!("bar"),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("foo", "^1"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.1"),
- ("bar", "1.0.0")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.1"),
+ ("bar", "1.0.0"),
+ ])),
+ );
}
#[test]
pkg!(("foo", "2.0.0")),
pkg!(("foo", "0.1.0")),
pkg!(("foo", "0.2.0")),
-
pkg!("bar" => ["d1", "d2", "d3", "d4"]),
pkg!("d1" => [dep_req("foo", "1")]),
pkg!("d2" => [dep_req("foo", "2")]),
pkg!("d4" => [dep_req("foo", "0.2")]),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep("bar"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep("bar")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.0"),
- ("foo", "2.0.0"),
- ("foo", "0.1.0"),
- ("foo", "0.2.0"),
- ("d1", "1.0.0"),
- ("d2", "1.0.0"),
- ("d3", "1.0.0"),
- ("d4", "1.0.0"),
- ("bar", "1.0.0")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("foo", "2.0.0"),
+ ("foo", "0.1.0"),
+ ("foo", "0.2.0"),
+ ("d1", "1.0.0"),
+ ("d2", "1.0.0"),
+ ("d3", "1.0.0"),
+ ("d4", "1.0.0"),
+ ("bar", "1.0.0"),
+ ])),
+ );
}
#[test]
let reg = registry(vec![
pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
-
pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
dep_req("other", "1")]),
pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
-
pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
pkg!(("baz", "1.0.1")),
-
pkg!(("dep_req", "1.0.0")),
pkg!(("dep_req", "2.0.0")),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("foo", "1"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.0"),
- ("bar", "2.0.0"),
- ("baz", "1.0.1")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "2.0.0"),
+ ("baz", "1.0.1"),
+ ])),
+ );
}
#[test]
pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("d", "1"),
- dep_req("r", "1"),
- ], ®).unwrap();
-
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("d", "1.0.0"),
- ("r", "1.0.0"),
- ("l-sys", "0.9.1"),
- ("l", "0.9.1"),
- ("l", "0.10.0")])));
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep_req("d", "1"), dep_req("r", "1")],
+ ®,
+ ).unwrap();
+
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("d", "1.0.0"),
+ ("r", "1.0.0"),
+ ("l-sys", "0.9.1"),
+ ("l", "0.9.1"),
+ ("l", "0.10.0"),
+ ])),
+ );
}
#[test]
let mut reglist = vec![
pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"),
dep_req("constrained", "=1.0.0")]),
-
pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"),
dep_req("backtrack_trap2", "1.0.2"),
dep_req("constrained", "1.0.0")]),
const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2
for i in 1..NUM_BARS_AND_TRAPS {
let vsn = format!("1.0.{}", i);
- reglist.push(pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"),
+ reglist.push(
+ pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"),
dep_req("backtrack_trap2", "1.0.2"),
- dep_req("constrained", "1.0.1")]));
+ dep_req("constrained", "1.0.1")]),
+ );
reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
reglist.push(pkg!(("constrained", vsn.clone())));
}
let reg = registry(reglist);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("foo", "1"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.0"),
- ("bar", "1.0.0"),
- ("constrained", "1.0.0")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.0"),
+ ])),
+ );
}
#[test]
let reg = registry(reglist.clone());
- let res = resolve(&pkg_id("root"), vec![
- dep_req("level0", "*"),
- ], ®);
+ let res = resolve(&pkg_id("root"), vec![dep_req("level0", "*")], ®);
assert!(res.is_err());
let reg = registry(reglist.clone());
- let res = resolve(&pkg_id("root"), vec![
- dep_req("level0", "*"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("level0", "*")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("level0", "1.0.0")])));
+ assert_that(
+ &res,
+ contains(names(&[("root", "1.0.0"), ("level0", "1.0.0")])),
+ );
// Make sure we have not special case no candidates.
reglist.push(pkg!(("constrained", "1.1.0")));
reglist.push(pkg!(("constrained", "1.0.0")));
- reglist.push(pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]));
+ reglist.push(
+ pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]),
+ );
let reg = registry(reglist.clone());
- let res = resolve(&pkg_id("root"), vec![
- dep_req("level0", "*"),
- dep_req("constrained", "*"),
- ], ®).unwrap();
-
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("level0", "1.0.0"),
- ("constrained", "1.1.0")])));
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep_req("level0", "*"), dep_req("constrained", "*")],
+ ®,
+ ).unwrap();
+
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("level0", "1.0.0"),
+ ("constrained", "1.1.0"),
+ ])),
+ );
let reg = registry(reglist.clone());
- let res = resolve(&pkg_id("root"), vec![
- dep_req("level0", "1.0.1"),
- dep_req("constrained", "*"),
- ], ®).unwrap();
-
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- (format!("level{}", DEPTH).as_str(), "1.0.0"),
- ("constrained", "1.0.0")])));
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep_req("level0", "1.0.1"), dep_req("constrained", "*")],
+ ®,
+ ).unwrap();
+
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ (format!("level{}", DEPTH).as_str(), "1.0.0"),
+ ("constrained", "1.0.0"),
+ ])),
+ );
let reg = registry(reglist.clone());
- let res = resolve(&pkg_id("root"), vec![
- dep_req("level0", "1.0.1"),
- dep_req("constrained", "1.1.0"),
- ], ®);
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")],
+ ®,
+ );
assert!(res.is_err());
}
}
let reg = registry(reglist);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("foo", "1"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®).unwrap();
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.0"),
- ("bar", "1.0.0"),
- ("constrained", "1.0.60")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ("constrained", "1.0.60"),
+ ])),
+ );
}
#[test]
pkg!(("A", "1.0.0") => [dep_req("B", "1.0"),
dep_req("C", "1.0"),
dep_req("D", "1.0.100")]),
-
pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]),
pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]),
-
pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]),
pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]),
pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]),
-
pkg!(("D", "1.0.0")),
pkg!(("D", "1.0.1")),
pkg!(("D", "1.0.2")),
pkg!(("D", "1.0.105")),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("A", "1"),
- ], ®).unwrap();
+ let res = resolve(&pkg_id("root"), vec![dep_req("A", "1")], ®).unwrap();
- assert_that(&res, contains(names(&[("A", "1.0.0"),
- ("B", "1.0.0"),
- ("C", "1.0.0"),
- ("D", "1.0.105")])));
+ assert_that(
+ &res,
+ contains(names(&[
+ ("A", "1.0.0"),
+ ("B", "1.0.0"),
+ ("C", "1.0.0"),
+ ("D", "1.0.105"),
+ ])),
+ );
}
#[test]
fn resolving_but_no_exists() {
- let reg = registry(vec![
- ]);
+ let reg = registry(vec![]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("foo", "1"),
- ], ®);
+ let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®);
assert!(res.is_err());
- assert_eq!(res.err().unwrap().to_string(), "\
-no matching package named `foo` found\n\
-location searched: registry `http://example.com/`\n\
-required by package `root v1.0.0 (registry `http://example.com/`)`\
-");
+ assert_eq!(
+ res.err().unwrap().to_string(),
+ "\
+ no matching package named `foo` found\n\
+ location searched: registry `http://example.com/`\n\
+ required by package `root v1.0.0 (registry `http://example.com/`)`\
+ "
+ );
}
#[test]
fn resolving_cycle() {
- let reg = registry(vec![
- pkg!("foo" => ["foo"]),
- ]);
+ let reg = registry(vec![pkg!("foo" => ["foo"])]);
- let _ = resolve(&pkg_id("root"), vec![
- dep_req("foo", "1"),
- ], ®);
+ let _ = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], ®);
}
#[test]
let reg = registry(vec![
pkg!(("foo", "1.0.1")),
pkg!(("foo", "1.0.0")),
-
pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
]);
- let res = resolve(&pkg_id("root"), vec![
- dep_req("bar", "1"),
- dep_req("foo", "=1.0.0"),
- ], ®).unwrap();
-
- assert_that(&res, contains(names(&[("root", "1.0.0"),
- ("foo", "1.0.0"),
- ("bar", "1.0.0")])));
+ let res = resolve(
+ &pkg_id("root"),
+ vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")],
+ ®,
+ ).unwrap();
+
+ assert_that(
+ &res,
+ contains(names(&[
+ ("root", "1.0.0"),
+ ("foo", "1.0.0"),
+ ("bar", "1.0.0"),
+ ])),
+ );
}
use cargo::util::paths::dylib_path_envvar;
-use cargotest::support::{project, execs, path2url};
+use cargotest::support::{execs, project, path2url};
use hamcrest::{assert_that, existing_file};
#[test]
fn simple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `target[/]debug[/]foo[EXE]`", dir = path2url(p.root())))
- .with_stdout("\
+[RUNNING] `target[/]debug[/]foo[EXE]`",
+ dir = path2url(p.root())
+ ))
+ .with_stdout(
+ "\
hello
-"));
+",
+ ),
+ );
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn simple_quiet() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-q"),
- execs().with_status(0)
- .with_stdout("hello")
+ assert_that(
+ p.cargo("run").arg("-q"),
+ execs().with_status(0).with_stdout("hello"),
);
- assert_that(p.cargo("run").arg("--quiet"),
- execs().with_status(0)
- .with_stdout("hello")
+ assert_that(
+ p.cargo("run").arg("--quiet"),
+ execs().with_status(0).with_stdout("hello"),
);
}
#[test]
fn simple_quiet_and_verbose() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-q").arg("-v"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("run").arg("-q").arg("-v"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] cannot set both --verbose and --quiet
-"));
+",
+ ),
+ );
}
#[test]
fn quiet_and_verbose_config() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[term]
verbose = true
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-q"),
- execs().with_status(0));
+ assert_that(p.cargo("run").arg("-q"), execs().with_status(0));
}
#[test]
fn simple_with_args() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
assert_eq!(std::env::args().nth(1).unwrap(), "hello");
assert_eq!(std::env::args().nth(2).unwrap(), "world");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("hello").arg("world"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("hello").arg("world"),
+ execs().with_status(0),
+ );
}
#[test]
fn exit_code() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { std::process::exit(2); }
- "#)
+ "#,
+ )
.build();
- let mut output = String::from("\
+ let mut output = String::from(
+ "\
[COMPILING] foo v0.0.1 (file[..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[..]`
-");
+",
+ );
if !cfg!(unix) {
- output.push_str("\
+ output.push_str(
+ "\
[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)
-");
+",
+ );
}
- assert_that(p.cargo("run"),
- execs().with_status(2).with_stderr(output));
+ assert_that(p.cargo("run"), execs().with_status(2).with_stderr(output));
}
#[test]
fn exit_code_verbose() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { std::process::exit(2); }
- "#)
+ "#,
+ )
.build();
- let mut output = String::from("\
+ let mut output = String::from(
+ "\
[COMPILING] foo v0.0.1 (file[..])
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[..]`
-");
+",
+ );
if !cfg!(unix) {
- output.push_str("\
+ output.push_str(
+ "\
[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)
-");
+",
+ );
}
- assert_that(p.cargo("run").arg("-v"),
- execs().with_status(2).with_stderr(output));
+ assert_that(
+ p.cargo("run").arg("-v"),
+ execs().with_status(2).with_stderr(output),
+ );
}
#[test]
fn no_main_file() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("run"),
- execs().with_status(101)
- .with_stderr("[ERROR] a bin target must be available \
- for `cargo run`\n"));
+ assert_that(
+ p.cargo("run"),
+ execs().with_status(101).with_stderr(
+ "[ERROR] a bin target must be available \
+ for `cargo run`\n",
+ ),
+ );
}
#[test]
fn too_many_bins() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "")
.file("src/bin/b.rs", "")
.build();
- assert_that(p.cargo("run"),
- execs().with_status(101)
- .with_stderr("[ERROR] `cargo run` requires that a project only \
- have one executable; use the `--bin` option \
- to specify which one to run\navailable binaries: [..]\n"));
+ assert_that(
+ p.cargo("run"),
+ execs().with_status(101).with_stderr(
+ "[ERROR] `cargo run` requires that a project only \
+ have one executable; use the `--bin` option \
+ to specify which one to run\navailable binaries: [..]\n",
+ ),
+ );
}
#[test]
fn specify_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("src/bin/a.rs", r#"
+ .file(
+ "src/bin/a.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate foo;
fn main() { println!("hello a.rs"); }
- "#)
- .file("src/bin/b.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/b.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate foo;
fn main() { println!("hello b.rs"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bin").arg("a").arg("-v"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("run").arg("--bin").arg("a").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] `rustc [..] src[/]lib.rs [..]`
[RUNNING] `rustc [..] src[/]bin[/]a.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `target[/]debug[/]a[EXE]`", dir = path2url(p.root())))
- .with_stdout("\
+[RUNNING] `target[/]debug[/]a[EXE]`",
+ dir = path2url(p.root())
+ ))
+ .with_stdout(
+ "\
hello a.rs
-"));
+",
+ ),
+ );
- assert_that(p.cargo("run").arg("--bin").arg("b").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("run").arg("--bin").arg("b").arg("-v"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] src[/]bin[/]b.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `target[/]debug[/]b[EXE]`")
- .with_stdout("\
+[RUNNING] `target[/]debug[/]b[EXE]`",
+ )
+ .with_stdout(
+ "\
hello b.rs
-"));
+",
+ ),
+ );
}
#[test]
fn run_example() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("examples/a.rs", r#"
+ .file(
+ "examples/a.rs",
+ r#"
fn main() { println!("example"); }
- "#)
- .file("src/bin/a.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/a.rs",
+ r#"
fn main() { println!("bin"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--example").arg("a"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("run").arg("--example").arg("a"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `target[/]debug[/]examples[/]a[EXE]`", dir = path2url(p.root())))
- .with_stdout("\
+[RUNNING] `target[/]debug[/]examples[/]a[EXE]`",
+ dir = path2url(p.root())
+ ))
+ .with_stdout(
+ "\
example
-"));
+",
+ ),
+ );
}
#[test]
fn run_bins() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("examples/a.rs", r#"
+ .file(
+ "examples/a.rs",
+ r#"
fn main() { println!("example"); }
- "#)
- .file("src/bin/a.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/a.rs",
+ r#"
fn main() { println!("bin"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bins"),
- execs().with_status(1)
- .with_stderr_contains("\
-error: Found argument '--bins' which wasn't expected, or isn't valid in this context"));
+ assert_that(
+ p.cargo("run").arg("--bins"),
+ execs().with_status(1).with_stderr_contains(
+ "\
+ error: Found argument '--bins' which wasn't expected, or isn't valid in this context",
+ ),
+ );
}
#[test]
fn run_with_filename() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("src/bin/a.rs", r#"
+ .file(
+ "src/bin/a.rs",
+ r#"
extern crate foo;
fn main() { println!("hello a.rs"); }
- "#)
- .file("examples/a.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
fn main() { println!("example"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bin").arg("bin.rs"),
- execs().with_status(101).with_stderr("\
-[ERROR] no bin target named `bin.rs`"));
+ assert_that(
+ p.cargo("run").arg("--bin").arg("bin.rs"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] no bin target named `bin.rs`",
+ ),
+ );
- assert_that(p.cargo("run").arg("--bin").arg("a.rs"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("run").arg("--bin").arg("a.rs"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no bin target named `a.rs`
-Did you mean `a`?"));
+Did you mean `a`?",
+ ),
+ );
- assert_that(p.cargo("run").arg("--example").arg("example.rs"),
- execs().with_status(101).with_stderr("\
-[ERROR] no example target named `example.rs`"));
+ assert_that(
+ p.cargo("run").arg("--example").arg("example.rs"),
+ execs().with_status(101).with_stderr(
+ "\
+ [ERROR] no example target named `example.rs`",
+ ),
+ );
- assert_that(p.cargo("run").arg("--example").arg("a.rs"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("run").arg("--example").arg("a.rs"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no example target named `a.rs`
-Did you mean `a`?"));
+Did you mean `a`?",
+ ),
+ );
}
#[test]
fn either_name_or_example() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/bin/a.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/a.rs",
+ r#"
fn main() { println!("hello a.rs"); }
- "#)
- .file("examples/b.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/b.rs",
+ r#"
fn main() { println!("hello b.rs"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--bin").arg("a").arg("--example").arg("b"),
- execs().with_status(101)
- .with_stderr("[ERROR] `cargo run` can run at most one \
- executable, but multiple were \
- specified"));
+ assert_that(
+ p.cargo("run")
+ .arg("--bin")
+ .arg("a")
+ .arg("--example")
+ .arg("b"),
+ execs().with_status(101).with_stderr(
+ "[ERROR] `cargo run` can run at most one \
+ executable, but multiple were \
+ specified",
+ ),
+ );
}
#[test]
fn one_bin_multiple_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("src/bin/main.rs", r#"
+ .file(
+ "src/bin/main.rs",
+ r#"
fn main() { println!("hello main.rs"); }
- "#)
- .file("examples/a.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
fn main() { println!("hello a.rs"); }
- "#)
- .file("examples/b.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/b.rs",
+ r#"
fn main() { println!("hello b.rs"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `target[/]debug[/]main[EXE]`", dir = path2url(p.root())))
- .with_stdout("\
+[RUNNING] `target[/]debug[/]main[EXE]`",
+ dir = path2url(p.root())
+ ))
+ .with_stdout(
+ "\
hello main.rs
-"));
+",
+ ),
+ );
}
#[test]
fn example_with_release_flag() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
version = "*"
path = "bar"
- "#)
- .file("examples/a.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
extern crate bar;
fn main() {
}
bar::baz();
}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
- "#)
- .file("bar/src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/bar.rs",
+ r#"
pub fn baz() {
if cfg!(debug_assertions) {
println!("slow2")
println!("fast2")
}
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-v").arg("--release").arg("--example").arg("a"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("run")
+ .arg("-v")
+ .arg("--release")
+ .arg("--example")
+ .arg("a"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]bar.rs --crate-type lib \
--emit=dep-info,link \
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `target[/]release[/]examples[/]a[EXE]`
",
- dir = p.root().display(),
- url = path2url(p.root()),
- ))
- .with_stdout("\
+ dir = p.root().display(),
+ url = path2url(p.root()),
+ ))
+ .with_stdout(
+ "\
fast1
-fast2"));
+fast2",
+ ),
+ );
- assert_that(p.cargo("run").arg("-v").arg("--example").arg("a"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("run").arg("-v").arg("--example").arg("a"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]bar.rs --crate-type lib \
--emit=dep-info,link \
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]examples[/]a[EXE]`
",
- dir = p.root().display(),
- url = path2url(p.root()),
- ))
- .with_stdout("\
+ dir = p.root().display(),
+ url = path2url(p.root()),
+ ))
+ .with_stdout(
+ "\
slow1
-slow2"));
+slow2",
+ ),
+ );
}
#[test]
fn run_dylib_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() { bar::bar(); }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
crate-type = ["dylib"]
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
.build();
- assert_that(p.cargo("run").arg("hello").arg("world"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("hello").arg("world"),
+ execs().with_status(0),
+ );
}
#[test]
fn release_works() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { if cfg!(debug_assertions) { panic!() } }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--release"),
- execs().with_status(0).with_stderr(&format!("\
+ assert_that(
+ p.cargo("run").arg("--release"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `target[/]release[/]foo[EXE]`
",
- dir = path2url(p.root()),
- )));
+ dir = path2url(p.root()),
+ )),
+ );
assert_that(&p.release_bin("foo"), existing_file());
}
#[test]
fn run_bin_different_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "bar"
- "#)
- .file("src/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bar.rs",
+ r#"
fn main() { }
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("run"), execs().with_status(0));
#[test]
fn dashes_are_forwarded() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
let s: Vec<String> = std::env::args().collect();
assert_eq!(s[1], "a");
assert_eq!(s[2], "--");
assert_eq!(s[3], "b");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("--").arg("a").arg("--").arg("b"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").arg("--").arg("a").arg("--").arg("b"),
+ execs().with_status(0),
+ );
}
#[test]
fn run_from_executable_folder() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() { println!("hello"); }
- "#)
+ "#,
+ )
.build();
let cwd = p.root().join("target").join("debug");
p.cargo("build").exec_with_output().unwrap();
- assert_that(p.cargo("run").cwd(cwd),
- execs().with_status(0)
- .with_stderr("\
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\
-[RUNNING] `.[/]foo[EXE]`")
- .with_stdout("\
+ assert_that(
+ p.cargo("run").cwd(cwd),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
+ [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\
+ [RUNNING] `.[/]foo[EXE]`",
+ )
+ .with_stdout(
+ "\
hello
-"));
+",
+ ),
+ );
}
#[test]
let mut dir2 = p.target_debug_dir();
dir2.push("dir=containing=equal=signs");
- let p = p
- .file("Cargo.toml", r#"
+ let p = p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", &format!(r##"
+ "#,
+ ).file(
+ "build.rs",
+ &format!(
+ r##"
fn main() {{
println!(r#"cargo:rustc-link-search=native={}"#);
println!(r#"cargo:rustc-link-search={}"#);
}}
- "##, dir1.display(), dir2.display()))
- .file("src/main.rs", &format!(r##"
+ "##,
+ dir1.display(),
+ dir2.display()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r##"
fn main() {{
let search_path = std::env::var_os("{}").unwrap();
let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
assert!(paths.contains(&r#"{}"#.into()));
assert!(paths.contains(&r#"{}"#.into()));
}}
- "##, dylib_path_envvar(), dir1.display(), dir2.display()))
+ "##,
+ dylib_path_envvar(),
+ dir1.display(),
+ dir2.display()
+ ),
+ )
.build();
assert_that(p.cargo("run"), execs().with_status(0));
let mut dir3 = p.target_debug_dir();
dir3.push("aaaaaaa");
- let p = p
- .file("Cargo.toml", r#"
+ let p = p.file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
- .file("build.rs", &format!(r##"
+ "#,
+ ).file(
+ "build.rs",
+ &format!(
+ r##"
fn main() {{
println!(r#"cargo:rustc-link-search=native={}"#);
println!(r#"cargo:rustc-link-search=native={}"#);
println!(r#"cargo:rustc-link-search=native={}"#);
}}
- "##, dir1.display(), dir2.display(), dir3.display()))
- .file("src/main.rs", &format!(r##"
+ "##,
+ dir1.display(),
+ dir2.display(),
+ dir3.display()
+ ),
+ )
+ .file(
+ "src/main.rs",
+ &format!(
+ r##"
fn main() {{
let search_path = std::env::var_os("{}").unwrap();
let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy());
assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy());
}}
- "##, dylib_path_envvar()))
+ "##,
+ dylib_path_envvar()
+ ),
+ )
.build();
assert_that(p.cargo("run"), execs().with_status(0));
#[test]
fn fail_no_extra_verbose() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
std::process::exit(1);
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("run").arg("-q"),
- execs().with_status(1)
- .with_stdout("")
- .with_stderr(""));
+ assert_that(
+ p.cargo("run").arg("-q"),
+ execs().with_status(1).with_stdout("").with_stderr(""),
+ );
}
#[test]
fn run_multiple_packages() {
let p = project("foo")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name = "foo"
- "#)
+ "#,
+ )
.file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }")
- .file("foo/d1/Cargo.toml", r#"
+ .file(
+ "foo/d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[[bin]]
name = "d1"
- "#)
+ "#,
+ )
.file("foo/d1/src/lib.rs", "")
.file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }")
- .file("foo/d2/Cargo.toml", r#"
+ .file(
+ "foo/d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
[[bin]]
name = "d2"
- "#)
+ "#,
+ )
.file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }")
- .file("d3/Cargo.toml", r#"
+ .file(
+ "d3/Cargo.toml",
+ r#"
[package]
name = "d3"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("d3/src/main.rs", "fn main() { println!(\"d2\"); }")
.build();
process_builder
};
- assert_that(cargo().arg("-p").arg("d1"),
- execs().with_status(0).with_stdout("d1"));
+ assert_that(
+ cargo().arg("-p").arg("d1"),
+ execs().with_status(0).with_stdout("d1"),
+ );
- assert_that(cargo().arg("-p").arg("d2").arg("--bin").arg("d2"),
- execs().with_status(0).with_stdout("d2"));
+ assert_that(
+ cargo().arg("-p").arg("d2").arg("--bin").arg("d2"),
+ execs().with_status(0).with_stdout("d2"),
+ );
- assert_that(cargo(),
- execs().with_status(0).with_stdout("foo"));
+ assert_that(cargo(), execs().with_status(0).with_stdout("foo"));
assert_that(cargo().arg("-p").arg("d1").arg("-p").arg("d2"),
execs()
error: The argument '--package <SPEC>' was provided more than once, but cannot be used multiple times
"));
- assert_that(cargo().arg("-p").arg("d3"),
- execs()
- .with_status(101)
- .with_stderr_contains("[ERROR] package `d3` is not a member of the workspace"));
+ assert_that(
+ cargo().arg("-p").arg("d3"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains("[ERROR] package `d3` is not a member of the workspace"),
+ );
}
use hamcrest::assert_that;
const CARGO_RUSTC_ERROR: &'static str =
-"[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering
+ "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering
the package by passing e.g. `--lib` or `--bin NAME` to specify a single target";
#[test]
fn build_lib_for_foo() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("--lib").arg("-v"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustc").arg("--lib").arg("-v"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.root().display(), url = p.url())));
+",
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ );
}
#[test]
fn lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("--lib").arg("-v")
- .arg("--").arg("-C").arg("debug-assertions=off"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustc")
+ .arg("--lib")
+ .arg("-v")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions=off"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.root().display(), url = p.url())))
+",
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ )
}
#[test]
fn build_main_and_allow_unstable_options() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("-v").arg("--bin").arg("foo")
- .arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("rustc")
+ .arg("-v")
+ .arg("--bin")
+ .arg("foo")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] {name} v{version} ({url})
[RUNNING] `rustc --crate-name {name} src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
--extern {name}={dir}[/]target[/]debug[/]deps[/]lib{name}-[..].rlib`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.root().display(), url = p.url(),
- name = "foo", version = "0.0.1")));
+ dir = p.root().display(),
+ url = p.url(),
+ name = "foo",
+ version = "0.0.1"
+ )),
+ );
}
#[test]
fn fails_when_trying_to_build_main_and_lib_with_args() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("-v")
- .arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(101)
- .with_stderr(CARGO_RUSTC_ERROR));
+ assert_that(
+ p.cargo("rustc")
+ .arg("-v")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(101).with_stderr(CARGO_RUSTC_ERROR),
+ );
}
#[test]
fn build_with_args_to_one_of_multiple_binaries() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/bin/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/foo.rs",
+ r#"
fn main() {}
- "#)
- .file("src/bin/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/bar.rs",
+ r#"
fn main() {}
- "#)
- .file("src/bin/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/baz.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("-v").arg("--bin").arg("bar")
- .arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustc")
+ .arg("-v")
+ .arg("--bin")
+ .arg("bar")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib --emit=dep-info,link \
-C debuginfo=2 -C metadata=[..] \
[RUNNING] `rustc --crate-name bar src[/]bin[/]bar.rs --crate-type bin --emit=dep-info,link \
-C debuginfo=2 -C debug-assertions [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = p.url())));
+",
+ url = p.url()
+ )),
+ );
}
#[test]
fn fails_with_args_to_all_binaries() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/bin/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/foo.rs",
+ r#"
fn main() {}
- "#)
- .file("src/bin/bar.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/bar.rs",
+ r#"
fn main() {}
- "#)
- .file("src/bin/baz.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/baz.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("-v")
- .arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(101)
- .with_stderr(CARGO_RUSTC_ERROR));
+ assert_that(
+ p.cargo("rustc")
+ .arg("-v")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(101).with_stderr(CARGO_RUSTC_ERROR),
+ );
}
#[test]
fn build_with_args_to_one_of_multiple_tests() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("tests/foo.rs", r#" "#)
.file("tests/bar.rs", r#" "#)
.file("tests/baz.rs", r#" "#)
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustc").arg("-v").arg("--test").arg("bar")
- .arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustc")
+ .arg("-v")
+ .arg("--test")
+ .arg("bar")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib --emit=dep-info,link \
-C debuginfo=2 -C metadata=[..] \
[RUNNING] `rustc --crate-name bar tests[/]bar.rs --emit=dep-info,link -C debuginfo=2 \
-C debug-assertions [..]--test[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = p.url())));
+",
+ url = p.url()
+ )),
+ );
}
#[test]
fn build_foo_with_bar_dependency() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
bar::baz()
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustc").arg("-v").arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ foo.cargo("rustc")
+ .arg("-v")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] bar v0.1.0 ([..])
[RUNNING] `[..] -C debuginfo=2 [..]`
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url())));
+",
+ url = foo.url()
+ )),
+ );
}
#[test]
fn build_only_bar_dependency() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
bar::baz()
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar")
- .arg("--").arg("-C").arg("debug-assertions"),
- execs()
- .with_status(0)
- .with_stderr("\
+ assert_that(
+ foo.cargo("rustc")
+ .arg("-v")
+ .arg("-p")
+ .arg("bar")
+ .arg("--")
+ .arg("-C")
+ .arg("debug-assertions"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] bar v0.1.0 ([..])
[RUNNING] `rustc --crate-name bar [..] --crate-type lib [..] -C debug-assertions [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn fail_with_multiple_packages() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.baz]
path = "../baz"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
if cfg!(flag = "1") { println!("Yeah from bar!"); }
}
- "#)
+ "#,
+ )
.build();
let _baz = project("baz")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "baz"
version = "0.1.0"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
if cfg!(flag = "1") { println!("Yeah from baz!"); }
}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar")
- .arg("-p").arg("baz"),
- execs().with_status(1).with_stderr_contains("\
+ assert_that(
+ foo.cargo("rustc")
+ .arg("-v")
+ .arg("-p")
+ .arg("bar")
+ .arg("-p")
+ .arg("baz"),
+ execs().with_status(1).with_stderr_contains(
+ "\
error: The argument '--package <SPEC>' was provided more than once, \
but cannot be used multiple times
-"));
+",
+ ),
+ );
}
#[test]
fn rustc_with_other_profile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dev-dependencies]
a = { path = "a" }
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[cfg(test)] extern crate a;
#[test]
fn foo() {}
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("rustc").arg("--profile").arg("test"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("rustc").arg("--profile").arg("test"),
+ execs().with_status(0),
+ );
}
use cargotest::support::{execs, project};
-use hamcrest::{assert_that};
+use hamcrest::assert_that;
#[test]
fn rustdoc_simple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustdoc").arg("-v"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustdoc").arg("-v"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[DOCUMENTING] foo v0.0.1 ({url})
[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \
-o {dir}[/]target[/]doc \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.root().display(), url = p.url())));
+",
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ );
}
#[test]
fn rustdoc_args() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[DOCUMENTING] foo v0.0.1 ({url})
[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \
-o {dir}[/]target[/]doc \
--cfg=foo \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.root().display(), url = p.url())));
+",
+ dir = p.root().display(),
+ url = p.url()
+ )),
+ );
}
-
-
#[test]
fn rustdoc_foo_with_bar_dependency() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ foo.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] bar v0.0.1 ([..])
[RUNNING] `rustc [..]bar[/]src[/]lib.rs [..]`
[DOCUMENTING] foo v0.0.1 ({url})
-L dependency={dir}[/]target[/]debug[/]deps \
--extern [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = foo.root().display(), url = foo.url())));
+",
+ dir = foo.root().display(),
+ url = foo.url()
+ )),
+ );
}
#[test]
fn rustdoc_only_bar_dependency() {
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
extern crate bar;
fn main() {
bar::baz()
}
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn baz() {}
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("rustdoc").arg("-v").arg("-p").arg("bar")
- .arg("--").arg("--cfg=foo"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ foo.cargo("rustdoc")
+ .arg("-v")
+ .arg("-p")
+ .arg("bar")
+ .arg("--")
+ .arg("--cfg=foo"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[DOCUMENTING] bar v0.0.1 ([..])
[RUNNING] `rustdoc --crate-name bar [..]bar[/]src[/]lib.rs \
-o {dir}[/]target[/]doc \
--cfg=foo \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = foo.root().display())));
+",
+ dir = foo.root().display()
+ )),
+ );
}
-
#[test]
fn rustdoc_same_name_documents_lib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
+ "#,
+ )
.file("src/lib.rs", r#" "#)
.build();
- assert_that(p.cargo("rustdoc").arg("-v")
- .arg("--").arg("--cfg=foo"),
- execs()
- .with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ p.cargo("rustdoc").arg("-v").arg("--").arg("--cfg=foo"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[DOCUMENTING] foo v0.0.1 ([..])
[RUNNING] `rustdoc --crate-name foo src[/]lib.rs \
-o {dir}[/]target[/]doc \
--cfg=foo \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.root().display())));
+",
+ dir = p.root().display()
+ )),
+ );
}
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn parses_env() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustdoc [..] --cfg=foo[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn parses_config() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustdocflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("doc").arg("-v"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("doc").arg("-v"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[RUNNING] `rustdoc [..] --cfg foo[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn bad_flags() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--bogus"),
- execs().with_status(101));
+ assert_that(
+ p.cargo("doc").env("RUSTDOCFLAGS", "--bogus"),
+ execs().with_status(101),
+ );
}
#[test]
fn rerun() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"),
- execs().with_status(0));
- assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=bar"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=bar"),
+ execs().with_status(0).with_stderr(
+ "\
[DOCUMENTING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn rustdocflags_passed_to_rustdoc_through_cargo_test() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! ```
//! assert!(cfg!(do_not_choke));
//! ```
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("--doc").env("RUSTDOCFLAGS", "--cfg do_not_choke"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test")
+ .arg("--doc")
+ .env("RUSTDOCFLAGS", "--cfg do_not_choke"),
+ execs().with_status(0),
+ );
}
#[test]
fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("test").arg("--doc").env("RUSTDOCFLAGS", "--markdown-no-toc"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test")
+ .arg("--doc")
+ .env("RUSTDOCFLAGS", "--markdown-no-toc"),
+ execs().with_status(0),
+ );
}
use std::fs::{self, File};
use cargotest::rustc_host;
-use cargotest::support::{project, project_in_home, execs, paths};
+use cargotest::support::{execs, paths, project, project_in_home};
use hamcrest::assert_that;
#[test]
fn env_rustflags_normal_source() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file("benches/d.rs", r#"
+ .file(
+ "benches/d.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
.build();
// Use RUSTFLAGS to pass an argument that will generate an error
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")
- .arg("--lib"),
- execs().with_status(101));
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")
- .arg("--bin=a"),
- execs().with_status(101));
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")
- .arg("--example=b"),
- execs().with_status(101));
- assert_that(p.cargo("test").env("RUSTFLAGS", "-Z bogus"),
- execs().with_status(101));
- assert_that(p.cargo("bench").env("RUSTFLAGS", "-Z bogus"),
- execs().with_status(101));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "-Z bogus").arg("--lib"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "-Z bogus").arg("--bin=a"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--example=b"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("test").env("RUSTFLAGS", "-Z bogus"),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("bench").env("RUSTFLAGS", "-Z bogus"),
+ execs().with_status(101),
+ );
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
#[cfg(not(foo))]
fn main() { }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is not passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.bar]
path = "../bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(not(foo))]
fn bar() { }
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is not passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
plugin = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn main() { }
#[cfg(not(foo))]
fn main() { }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is not passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn foo() { }
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(not(foo))]
fn bar() { }
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
}
#[test]
fn env_rustflags_normal_source_with_target() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file("benches/d.rs", r#"
+ .file(
+ "benches/d.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
.build();
let host = &rustc_host();
// Use RUSTFLAGS to pass an argument that will generate an error
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")
- .arg("--lib").arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")
- .arg("--bin=a").arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus")
- .arg("--example=b").arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("test").env("RUSTFLAGS", "-Z bogus")
- .arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("bench").env("RUSTFLAGS", "-Z bogus")
- .arg("--target").arg(host),
- execs().with_status(101));
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--lib")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--bin=a")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--example=b")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("test")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("bench")
+ .env("RUSTFLAGS", "-Z bogus")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(101),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
#[cfg(foo)]
fn main() { }
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "--cfg foo")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.bar]
path = "../bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(foo)]
fn bar() { }
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build")
+ .env("RUSTFLAGS", "--cfg foo")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
plugin = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn main() { }
#[cfg(foo)]
fn main() { }
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .env("RUSTFLAGS", "--cfg foo")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn foo() { }
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(foo)]
fn bar() { }
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(foo.cargo("build").env("RUSTFLAGS", "--cfg foo")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build")
+ .env("RUSTFLAGS", "--cfg foo")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(0),
+ );
}
#[test]
fn env_rustflags_recompile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
// Setting RUSTFLAGS forces a recompile
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus"),
- execs().with_status(101));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "-Z bogus"),
+ execs().with_status(101),
+ );
}
#[test]
fn env_rustflags_recompile2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
// Setting RUSTFLAGS forces a recompile
- assert_that(p.cargo("build").env("RUSTFLAGS", "-Z bogus"),
- execs().with_status(101));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "-Z bogus"),
+ execs().with_status(101),
+ );
}
#[test]
fn env_rustflags_no_recompile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_stdout("").with_status(0));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_stdout("").with_status(0),
+ );
}
#[test]
fn build_rustflags_normal_source() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file("benches/d.rs", r#"
+ .file(
+ "benches/d.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
- .file(".cargo/config", r#"
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["-Z", "bogus"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--lib"),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--bin=a"),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--example=b"),
- execs().with_status(101));
- assert_that(p.cargo("test"),
- execs().with_status(101));
- assert_that(p.cargo("bench"),
- execs().with_status(101));
+ assert_that(p.cargo("build").arg("--lib"), execs().with_status(101));
+ assert_that(p.cargo("build").arg("--bin=a"), execs().with_status(101));
+ assert_that(
+ p.cargo("build").arg("--example=b"),
+ execs().with_status(101),
+ );
+ assert_that(p.cargo("test"), execs().with_status(101));
+ assert_that(p.cargo("bench"), execs().with_status(101));
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
#[cfg(not(foo))]
fn main() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is not passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.bar]
path = "../bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(not(foo))]
fn bar() { }
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is not passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
plugin = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn main() { }
#[cfg(not(foo))]
fn main() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
// when --target is not specified.
// In this test if --cfg foo is not passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn foo() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(not(foo))]
fn bar() { }
- "#)
+ "#,
+ )
.build();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
}
#[test]
fn build_rustflags_normal_source_with_target() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file("benches/d.rs", r#"
+ .file(
+ "benches/d.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
- .file(".cargo/config", r#"
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["-Z", "bogus"]
- "#)
+ "#,
+ )
.build();
let ref host = rustc_host();
// Use RUSTFLAGS to pass an argument that will generate an error
- assert_that(p.cargo("build")
- .arg("--lib").arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--bin=a").arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--example=b").arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("test")
- .arg("--target").arg(host),
- execs().with_status(101));
- assert_that(p.cargo("bench")
- .arg("--target").arg(host),
- execs().with_status(101));
+ assert_that(
+ p.cargo("build").arg("--lib").arg("--target").arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build").arg("--bin=a").arg("--target").arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("build")
+ .arg("--example=b")
+ .arg("--target")
+ .arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("test").arg("--target").arg(host),
+ execs().with_status(101),
+ );
+ assert_that(
+ p.cargo("bench").arg("--target").arg(host),
+ execs().with_status(101),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
build = "build.rs"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
#[cfg(foo)]
fn main() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(p.cargo("build")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--target").arg(host),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[build-dependencies.bar]
path = "../bar"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
fn main() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(foo)]
fn bar() { }
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(foo.cargo("build")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").arg("--target").arg(host),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
plugin = true
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn main() { }
#[cfg(foo)]
fn main() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(p.cargo("build")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--target").arg(host),
+ execs().with_status(0),
+ );
}
#[test]
// when --target is specified.
// In this test if --cfg foo is passed the build will fail.
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "../bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn foo() { }
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
let _bar = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
fn bar() { }
#[cfg(foo)]
fn bar() { }
- "#)
+ "#,
+ )
.build();
let host = rustc_host();
- assert_that(foo.cargo("build")
- .arg("--target").arg(host),
- execs().with_status(0));
+ assert_that(
+ foo.cargo("build").arg("--target").arg(host),
+ execs().with_status(0),
+ );
}
#[test]
fn build_rustflags_recompile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
// Setting RUSTFLAGS forces a recompile
let config = r#"
let mut config_file = File::create(config_file).unwrap();
config_file.write_all(config.as_bytes()).unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(101));
+ assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
fn build_rustflags_recompile2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
// Setting RUSTFLAGS forces a recompile
let config = r#"
let mut config_file = File::create(config_file).unwrap();
config_file.write_all(config.as_bytes()).unwrap();
- assert_that(p.cargo("build"),
- execs().with_status(101));
+ assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
fn build_rustflags_no_recompile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_status(0));
- assert_that(p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
- execs().with_stdout("").with_status(0));
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").env("RUSTFLAGS", "--cfg foo"),
+ execs().with_stdout("").with_status(0),
+ );
}
#[test]
let home = paths::home();
let home_config = home.join(".cargo");
fs::create_dir(&home_config).unwrap();
- File::create(&home_config.join("config")).unwrap().write_all(br#"
+ File::create(&home_config.join("config"))
+ .unwrap()
+ .write_all(
+ br#"
[build]
rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"]
- "#).unwrap();
+ "#,
+ )
+ .unwrap();
// And we need the project to be inside the home directory
// so the walking process finds the home project twice.
let p = project_in_home("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
fn target_rustflags_normal_source() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file("benches/d.rs", r#"
+ .file(
+ "benches/d.rs",
+ r#"
#![feature(test)]
extern crate test;
- #[bench] fn run1(_ben: &mut test::Bencher) { }"#)
- .file(".cargo/config", &format!("
+ #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ "
[target.{}]
rustflags = [\"-Z\", \"bogus\"]
- ", rustc_host()))
+ ",
+ rustc_host()
+ ),
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--lib"),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--bin=a"),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--example=b"),
- execs().with_status(101));
- assert_that(p.cargo("test"),
- execs().with_status(101));
- assert_that(p.cargo("bench"),
- execs().with_status(101));
+ assert_that(p.cargo("build").arg("--lib"), execs().with_status(101));
+ assert_that(p.cargo("build").arg("--bin=a"), execs().with_status(101));
+ assert_that(
+ p.cargo("build").arg("--example=b"),
+ execs().with_status(101),
+ );
+ assert_that(p.cargo("test"), execs().with_status(101));
+ assert_that(p.cargo("bench"), execs().with_status(101));
}
// target.{}.rustflags takes precedence over build.rustflags
#[test]
fn target_rustflags_precedence() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!("
+ .file(
+ ".cargo/config",
+ &format!(
+ "
[build]
rustflags = [\"--cfg\", \"foo\"]
[target.{}]
rustflags = [\"-Z\", \"bogus\"]
- ", rustc_host()))
+ ",
+ rustc_host()
+ ),
+ )
.build();
- assert_that(p.cargo("build")
- .arg("--lib"),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--bin=a"),
- execs().with_status(101));
- assert_that(p.cargo("build")
- .arg("--example=b"),
- execs().with_status(101));
- assert_that(p.cargo("test"),
- execs().with_status(101));
- assert_that(p.cargo("bench"),
- execs().with_status(101));
+ assert_that(p.cargo("build").arg("--lib"), execs().with_status(101));
+ assert_that(p.cargo("build").arg("--bin=a"), execs().with_status(101));
+ assert_that(
+ p.cargo("build").arg("--example=b"),
+ execs().with_status(101),
+ );
+ assert_that(p.cargo("test"), execs().with_status(101));
+ assert_that(p.cargo("bench"), execs().with_status(101));
}
#[test]
fn cfg_rustflags_normal_source() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn t() {}")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.'cfg({})']
rustflags = ["--cfg", "bar"]
- "#, if rustc_host().contains("-windows-") {"windows"} else {"not(windows)"}))
+ "#,
+ if rustc_host().contains("-windows-") {
+ "windows"
+ } else {
+ "not(windows)"
+ }
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("--lib").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--lib").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build").arg("--bin=a").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--bin=a").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build").arg("--example=b").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--example=b").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("test").arg("--no-run").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--no-run").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("bench").arg("--no-run").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("--no-run").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] release [optimized] target(s) in [..]
-"));
-
+",
+ ),
+ );
}
// target.'cfg(...)'.rustflags takes precedence over build.rustflags
#[test]
fn cfg_rustflags_precedence() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "pub fn t() {}")
.file("src/bin/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.file("tests/c.rs", "#[test] fn f() { }")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[build]
rustflags = ["--cfg", "foo"]
[target.'cfg({})']
rustflags = ["--cfg", "bar"]
- "#, if rustc_host().contains("-windows-") { "windows" } else { "not(windows)" }))
+ "#,
+ if rustc_host().contains("-windows-") {
+ "windows"
+ } else {
+ "not(windows)"
+ }
+ ),
+ )
.build();
- assert_that(p.cargo("build").arg("--lib").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--lib").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build").arg("--bin=a").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--bin=a").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build").arg("--example=b").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").arg("--example=b").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("test").arg("--no-run").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--no-run").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("bench").arg("--no-run").arg("-v"),
- execs().with_status(0).with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("bench").arg("--no-run").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[RUNNING] `rustc [..] --cfg bar[..]`
[FINISHED] release [optimized] target(s) in [..]
-"));
-
+",
+ ),
+ );
}
#[test]
fn target_rustflags_string_and_array_form1() {
let p1 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = ["--cfg", "foo"]
- "#)
+ "#,
+ )
.build();
- assert_that(p1.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p1.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg foo[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
let p2 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
rustflags = "--cfg foo"
- "#)
+ "#,
+ )
.build();
- assert_that(p2.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p2.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg foo[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
+",
+ ),
+ );
}
#[test]
fn target_rustflags_string_and_array_form2() {
let p1 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
- .file(".cargo/config", &format!(r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}]
rustflags = ["--cfg", "foo"]
- "#, rustc_host()))
+ "#,
+ rustc_host()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p1.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p1.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg foo[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
let p2 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
- .file(".cargo/config", &format!(r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}]
rustflags = "--cfg foo"
- "#, rustc_host()))
+ "#,
+ rustc_host()
+ ),
+ )
.file("src/lib.rs", "")
.build();
- assert_that(p2.cargo("build").arg("-v"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p2.cargo("build").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] --cfg foo[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn two_matching_in_config() {
let p1 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
- .file(".cargo/config", r#"
+ "#,
+ )
+ .file(
+ ".cargo/config",
+ r#"
[target.'cfg(unix)']
rustflags = ["--cfg", 'foo="a"']
[target.'cfg(windows)']
rustflags = ["--cfg", 'foo="b"']
[target.'cfg(target_pointer_width = "64")']
rustflags = ["--cfg", 'foo="b"']
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {
if cfg!(foo = "a") {
println!("a");
panic!()
}
}
- "#)
+ "#,
+ )
.build();
assert_that(p1.cargo("run"), execs().with_status(0));
- assert_that(p1.cargo("build"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p1.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] [..]
-"));
+",
+ ),
+ );
}
use hamcrest::assert_that;
use url::Url;
-fn registry_path() -> PathBuf { paths::root().join("registry") }
-fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
-fn api_path() -> PathBuf { paths::root().join("api") }
-fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() }
+fn registry_path() -> PathBuf {
+ paths::root().join("registry")
+}
+fn registry() -> Url {
+ Url::from_file_path(&*registry_path()).ok().unwrap()
+}
+fn api_path() -> PathBuf {
+ paths::root().join("api")
+}
+fn api() -> Url {
+ Url::from_file_path(&*api_path()).ok().unwrap()
+}
fn setup() {
let config = paths::root().join(".cargo/config");
fs::create_dir_all(&api_path().join("api/v1")).unwrap();
let _ = repo(®istry_path())
- .file("config.json", &format!(r#"{{
+ .file(
+ "config.json",
+ &format!(
+ r#"{{
"dl": "{0}",
"api": "{0}"
- }}"#, api()))
+ }}"#,
+ api()
+ ),
+ )
.build();
}
//
// On windows, though, `?` is an invalid character, but we always build curl
// from source there anyway!
- File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ File::create(&base)
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
if !cfg!(windows) {
- File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap()
- .write_all(contents.as_bytes()).unwrap();
+ File::create(&base.with_file_name("crates?q=postgres&per_page=10"))
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
}
- assert_that(cargo_process("search").arg("postgres")
- .arg("--index").arg(registry().to_string()),
- execs().with_status(0)
- .with_stdout_contains("\
-hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+ assert_that(
+ cargo_process("search")
+ .arg("postgres")
+ .arg("--index")
+ .arg(registry().to_string()),
+ execs().with_status(0).with_stdout_contains(
+ "\
+ hoare = \"0.1.1\" # Design by contract style assertions for Rust",
+ ),
+ );
}
// TODO: Deprecated
//
// On windows, though, `?` is an invalid character, but we always build curl
// from source there anyway!
- File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ File::create(&base)
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
if !cfg!(windows) {
- File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap()
- .write_all(contents.as_bytes()).unwrap();
+ File::create(&base.with_file_name("crates?q=postgres&per_page=10"))
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
}
- assert_that(cargo_process("search").arg("postgres")
- .arg("--host").arg(registry().to_string()),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ cargo_process("search")
+ .arg("postgres")
+ .arg("--host")
+ .arg(registry().to_string()),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[WARNING] The flag '--host' is no longer valid.
Previous versions of Cargo accepted this flag, but it is being
about this warning.
[UPDATING] registry `{reg}`
",
- reg = registry()))
- .with_stdout_contains("\
-hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+ reg = registry()
+ ))
+ .with_stdout_contains(
+ "\
+ hoare = \"0.1.1\" # Design by contract style assertions for Rust",
+ ),
+ );
}
// TODO: Deprecated
//
// On windows, though, `?` is an invalid character, but we always build curl
// from source there anyway!
- File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ File::create(&base)
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
if !cfg!(windows) {
- File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap()
- .write_all(contents.as_bytes()).unwrap();
+ File::create(&base.with_file_name("crates?q=postgres&per_page=10"))
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
}
- assert_that(cargo_process("search").arg("postgres")
- .arg("--index").arg(registry().to_string())
- .arg("--host").arg(registry().to_string()),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ cargo_process("search")
+ .arg("postgres")
+ .arg("--index")
+ .arg(registry().to_string())
+ .arg("--host")
+ .arg(registry().to_string()),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[WARNING] The flag '--host' is no longer valid.
Previous versions of Cargo accepted this flag, but it is being
about this warning.
[UPDATING] registry `{reg}`
",
- reg = registry()))
- .with_stdout_contains("\
-hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+ reg = registry()
+ ))
+ .with_stdout_contains(
+ "\
+ hoare = \"0.1.1\" # Design by contract style assertions for Rust",
+ ),
+ );
}
#[test]
//
// On windows, though, `?` is an invalid character, but we always build curl
// from source there anyway!
- File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
+ File::create(&base)
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
if !cfg!(windows) {
- File::create(&base.with_file_name("crates?q=postgres+sql&per_page=10")).unwrap()
- .write_all(contents.as_bytes()).unwrap();
+ File::create(&base.with_file_name("crates?q=postgres+sql&per_page=10"))
+ .unwrap()
+ .write_all(contents.as_bytes())
+ .unwrap();
}
- assert_that(cargo_process("search").arg("postgres").arg("sql")
- .arg("--index").arg(registry().to_string()),
- execs().with_status(0)
- .with_stdout_contains("\
-hoare = \"0.1.1\" # Design by contract style assertions for Rust"));
+ assert_that(
+ cargo_process("search")
+ .arg("postgres")
+ .arg("sql")
+ .arg("--index")
+ .arg(registry().to_string()),
+ execs().with_status(0).with_stdout_contains(
+ "\
+ hoare = \"0.1.1\" # Design by contract style assertions for Rust",
+ ),
+ );
}
#[test]
fn help() {
- assert_that(cargo_process("search").arg("-h"),
- execs().with_status(0));
- assert_that(cargo_process("help").arg("search"),
- execs().with_status(0));
+ assert_that(cargo_process("search").arg("-h"), execs().with_status(0));
+ assert_that(cargo_process("help").arg("search"), execs().with_status(0));
// Ensure that help output goes to stdout, not stderr.
- assert_that(cargo_process("search").arg("--help"),
- execs().with_stderr(""));
- assert_that(cargo_process("search").arg("--help"),
- execs().with_stdout_contains("[..] --frozen [..]"));
+ assert_that(
+ cargo_process("search").arg("--help"),
+ execs().with_stderr(""),
+ );
+ assert_that(
+ cargo_process("search").arg("--help"),
+ execs().with_stdout_contains("[..] --frozen [..]"),
+ );
}
const N: usize = 50;
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
Package::new("bar", "0.1.0").publish();
- assert_that(foo.cargo("build"),
- execs().with_status(0));
+ assert_that(foo.cargo("build"), execs().with_status(0));
let index = find_index();
let path = paths::home().join("tmp");
for _ in 0..N {
git::commit(&repo);
- index.remote_anonymous(&url).unwrap()
- .fetch(&["refs/heads/master:refs/remotes/foo/master"],
- None,
- None).unwrap();
+ index
+ .remote_anonymous(&url)
+ .unwrap()
+ .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None)
+ .unwrap();
}
drop((repo, index));
Package::new("bar", "0.1.1").publish();
- let before = find_index().join(".git/objects/pack")
- .read_dir().unwrap()
- .count();
+ let before = find_index()
+ .join(".git/objects/pack")
+ .read_dir()
+ .unwrap()
+ .count();
assert!(before > N);
let mut cmd = foo.cargo("update");
}
cmd.env("RUST_LOG", "trace");
assert_that(cmd, execs().with_status(0));
- let after = find_index().join(".git/objects/pack")
- .read_dir().unwrap()
- .count();
- assert!(after < before,
- "packfiles before: {}\n\
- packfiles after: {}", before, after);
+ let after = find_index()
+ .join(".git/objects/pack")
+ .read_dir()
+ .unwrap()
+ .count();
+ assert!(
+ after < before,
+ "packfiles before: {}\n\
+ packfiles after: {}",
+ before,
+ after
+ );
}
#[test]
fn use_git_gc() {
if Command::new("git").arg("--version").output().is_err() {
- return
+ return;
}
run_test(None);
}
fn avoid_using_git() {
let path = env::var_os("PATH").unwrap_or_default();
let mut paths = env::split_paths(&path).collect::<Vec<_>>();
- let idx = paths.iter().position(|p| {
- p.join("git").exists() || p.join("git.exe").exists()
- });
+ let idx = paths
+ .iter()
+ .position(|p| p.join("git").exists() || p.join("git.exe").exists());
match idx {
- Some(i) => { paths.remove(i); }
+ Some(i) => {
+ paths.remove(i);
+ }
None => return,
}
run_test(Some(&env::join_paths(&paths).unwrap()));
use std::str;
use cargo;
-use cargotest::{sleep_ms, is_nightly, rustc_host};
-use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest, cargo_exe};
+use cargotest::{is_nightly, rustc_host, sleep_ms};
+use cargotest::support::{basic_bin_manifest, basic_lib_manifest, cargo_exe, execs, project};
use cargotest::support::paths::CargoPathExt;
use cargotest::support::registry::Package;
use hamcrest::{assert_that, existing_file, is_not};
fn cargo_test_simple() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn hello() -> &'static str {
"hello"
}
#[test]
fn test_hello() {
assert_eq!(hello(), "hello")
- }"#)
+ }"#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("hello\n"));
-
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("hello\n"),
+ );
+
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.5.0 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
- .with_stdout_contains("test test_hello ... ok"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ p.url()
+ ))
+ .with_stdout_contains("test test_hello ... ok"),
+ );
}
#[test]
fn cargo_test_release() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
[dependencies]
bar = { path = "bar" }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
pub fn foo() { bar::bar(); }
#[test]
fn test() { foo(); }
- "#)
- .file("tests/test.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
extern crate foo;
#[test]
fn test() { foo::foo(); }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn bar() {}")
.build();
- assert_that(p.cargo("test").arg("-v").arg("--release"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("-v").arg("--release"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[RUNNING] [..] -C opt-level=3 [..]
[COMPILING] foo v0.1.0 ({dir})
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE]`
[RUNNING] `[..]target[/]release[/]deps[/]test-[..][EXE]`
[DOCTEST] foo
-[RUNNING] `rustdoc --test [..]lib.rs[..]`", dir = p.url()))
- .with_stdout_contains_n("test test ... ok", 2)
- .with_stdout_contains("running 0 tests"));
+[RUNNING] `rustdoc --test [..]lib.rs[..]`",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("test test ... ok", 2)
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
return;
}
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.5.0"
[profile.release]
overflow-checks = true
- "#)
- .file("src/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "src/foo.rs",
+ r#"
use std::panic;
pub fn main() {
let r = panic::catch_unwind(|| {
[1, i32::max_value()].iter().sum::<i32>();
});
assert!(r.is_err());
- }"#)
+ }"#,
+ )
.build();
- assert_that(p.cargo("build").arg("--release"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("--release"), execs().with_status(0));
assert_that(&p.release_bin("foo"), existing_file());
- assert_that(process(&p.release_bin("foo")),
- execs().with_status(0).with_stdout(""));
+ assert_that(
+ process(&p.release_bin("foo")),
+ execs().with_status(0).with_stdout(""),
+ );
}
#[test]
fn cargo_test_verbose() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
#[test] fn test_hello() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v").arg("hello"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test").arg("-v").arg("hello"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc [..] src[/]main.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] `[..]target[/]debug[/]deps[/]foo-[..][EXE] hello`", url = p.url()))
- .with_stdout_contains("test test_hello ... ok"));
+[RUNNING] `[..]target[/]debug[/]deps[/]foo-[..][EXE] hello`",
+ url = p.url()
+ ))
+ .with_stdout_contains("test test_hello ... ok"),
+ );
}
#[test]
fn many_similar_names() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
#[test] fn lib_test() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate foo;
fn main() {}
#[test] fn bin_test() { foo::foo() }
- ")
- .file("tests/foo.rs", r#"
+ ",
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
extern crate foo;
#[test] fn test_test() { foo::foo() }
- "#)
+ "#,
+ )
.build();
let output = p.cargo("test").arg("-v").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
- assert!(output.contains("test bin_test"), "bin_test missing\n{}", output);
- assert!(output.contains("test lib_test"), "lib_test missing\n{}", output);
- assert!(output.contains("test test_test"), "test_test missing\n{}", output);
+ assert!(
+ output.contains("test bin_test"),
+ "bin_test missing\n{}",
+ output
+ );
+ assert!(
+ output.contains("test lib_test"),
+ "lib_test missing\n{}",
+ output
+ );
+ assert!(
+ output.contains("test test_test"),
+ "test_test missing\n{}",
+ output
+ );
}
#[test]
fn cargo_test_failing_test_in_bin() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
fn hello() -> &'static str {
"hello"
}
#[test]
fn test_hello() {
assert_eq!(hello(), "nope")
- }"#)
+ }"#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("hello\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("hello\n"),
+ );
- assert_that(p.cargo("test"),
- execs().with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[ERROR] test failed, to rerun pass '--bin foo'", url = p.url()))
- .with_stdout_contains("
+[ERROR] test failed, to rerun pass '--bin foo'",
+ url = p.url()
+ ))
+ .with_stdout_contains(
+ "
running 1 test
test test_hello ... FAILED
failures:
---- test_hello stdout ----
-<tab>thread 'test_hello' panicked at 'assertion failed:[..]")
- .with_stdout_contains("[..]`(left == right)`[..]")
- .with_stdout_contains("[..]left: `\"hello\"`,[..]")
- .with_stdout_contains("[..]right: `\"nope\"`[..]")
- .with_stdout_contains("[..]src[/]main.rs:12[..]")
- .with_stdout_contains("\
+<tab>thread 'test_hello' panicked at 'assertion failed:[..]",
+ )
+ .with_stdout_contains("[..]`(left == right)`[..]")
+ .with_stdout_contains("[..]left: `\"hello\"`,[..]")
+ .with_stdout_contains("[..]right: `\"nope\"`[..]")
+ .with_stdout_contains("[..]src[/]main.rs:12[..]")
+ .with_stdout_contains(
+ "\
failures:
test_hello
-")
- .with_status(101));
+",
+ )
+ .with_status(101),
+ );
}
#[test]
fn cargo_test_failing_test_in_test() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
- .file("src/main.rs", r#"
+ .file(
+ "src/main.rs",
+ r#"
pub fn main() {
println!("hello");
- }"#)
- .file("tests/footest.rs", r#"
+ }"#,
+ )
+ .file(
+ "tests/footest.rs",
+ r#"
#[test]
fn test_hello() {
assert!(false)
- }"#)
+ }"#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
- assert_that(process(&p.bin("foo")),
- execs().with_status(0).with_stdout("hello\n"));
+ assert_that(
+ process(&p.bin("foo")),
+ execs().with_status(0).with_stdout("hello\n"),
+ );
- assert_that(p.cargo("test"),
- execs().with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]footest-[..][EXE]
-[ERROR] test failed, to rerun pass '--test footest'", url = p.url()))
- .with_stdout_contains("running 0 tests")
- .with_stdout_contains("\
+[ERROR] test failed, to rerun pass '--test footest'",
+ url = p.url()
+ ))
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains(
+ "\
running 1 test
test test_hello ... FAILED
---- test_hello stdout ----
<tab>thread 'test_hello' panicked at 'assertion failed: false', \
tests[/]footest.rs:4[..]
-")
- .with_stdout_contains("\
+",
+ )
+ .with_stdout_contains(
+ "\
failures:
test_hello
-")
- .with_status(101));
+",
+ )
+ .with_status(101),
+ );
}
#[test]
fn cargo_test_failing_test_in_lib() {
let p = project("foo")
.file("Cargo.toml", &basic_lib_manifest("foo"))
- .file("src/lib.rs", r#"
+ .file(
+ "src/lib.rs",
+ r#"
#[test]
fn test_hello() {
assert!(false)
- }"#)
+ }"#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.5.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[ERROR] test failed, to rerun pass '--lib'", url = p.url()))
- .with_stdout_contains("\
+[ERROR] test failed, to rerun pass '--lib'",
+ url = p.url()
+ ))
+ .with_stdout_contains(
+ "\
test test_hello ... FAILED
failures:
---- test_hello stdout ----
<tab>thread 'test_hello' panicked at 'assertion failed: false', \
src[/]lib.rs:4[..]
-")
- .with_stdout_contains("\
+",
+ )
+ .with_stdout_contains(
+ "\
failures:
test_hello
-")
- .with_status(101));
+",
+ )
+ .with_status(101),
+ );
}
-
#[test]
fn test_with_lib_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[bin]]
name = "baz"
path = "src/main.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
///
/// ```rust
/// extern crate foo;
///
pub fn foo(){}
#[test] fn lib_test() {}
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
#[allow(unused_extern_crates)]
extern crate foo;
#[test]
fn bin_test() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]baz-[..][EXE]
-[DOCTEST] foo", p.url()))
- .with_stdout_contains("test lib_test ... ok")
- .with_stdout_contains("test bin_test ... ok")
- .with_stdout_contains_n("test [..] ... ok", 3));
+[DOCTEST] foo",
+ p.url()
+ ))
+ .with_stdout_contains("test lib_test ... ok")
+ .with_stdout_contains("test bin_test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3),
+ );
}
#[test]
fn test_with_deep_lib_dep() {
let p = project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies.foo]
path = "../foo"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#[cfg(test)]
extern crate foo;
/// ```
fn bar_test() {
foo::foo();
}
- ")
+ ",
+ )
.build();
let _p2 = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
#[test]
fn foo_test() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[..]
-[DOCTEST] bar", dir = p.url()))
- .with_stdout_contains("test bar_test ... ok")
- .with_stdout_contains_n("test [..] ... ok", 2));
+[DOCTEST] bar",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test bar_test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 2),
+ );
}
#[test]
fn external_test_explicit() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[test]]
name = "test"
path = "src/test.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn get_hello() -> &'static str { "Hello" }
#[test]
fn internal_test() {}
- "#)
- .file("src/test.rs", r#"
+ "#,
+ )
+ .file(
+ "src/test.rs",
+ r#"
extern crate foo;
#[test]
fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]
-[DOCTEST] foo", p.url()))
- .with_stdout_contains("test internal_test ... ok")
- .with_stdout_contains("test external_test ... ok")
- .with_stdout_contains("running 0 tests"));
+[DOCTEST] foo",
+ p.url()
+ ))
+ .with_stdout_contains("test internal_test ... ok")
+ .with_stdout_contains("test external_test ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn external_test_named_test() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[[test]]
name = "test"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("tests/test.rs", r#"
+ .file(
+ "tests/test.rs",
+ r#"
#[test]
fn foo() { }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0))
+ assert_that(p.cargo("test"), execs().with_status(0))
}
#[test]
fn external_test_implicit() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn get_hello() -> &'static str { "Hello" }
#[test]
fn internal_test() {}
- "#)
- .file("tests/external.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/external.rs",
+ r#"
extern crate foo;
#[test]
fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]external-[..][EXE]
-[DOCTEST] foo", p.url()))
- .with_stdout_contains("test internal_test ... ok")
- .with_stdout_contains("test external_test ... ok")
- .with_stdout_contains("running 0 tests"));
+[DOCTEST] foo",
+ p.url()
+ ))
+ .with_stdout_contains("test internal_test ... ok")
+ .with_stdout_contains("test external_test ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn dont_run_examples() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
- "#)
- .file("examples/dont-run-me-i-will-fail.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ "#,
+ )
+ .file(
+ "examples/dont-run-me-i-will-fail.rs",
+ r#"
fn main() { panic!("Examples should not be run by 'cargo test'"); }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
#[test]
fn pass_through_command_line() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#[test] fn foo() {}
#[test] fn bar() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test").arg("bar"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("bar"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[DOCTEST] foo", dir = p.url()))
- .with_stdout_contains("test bar ... ok")
- .with_stdout_contains("running 0 tests"));
-
- assert_that(p.cargo("test").arg("foo"),
- execs().with_status(0)
- .with_stderr("\
+[DOCTEST] foo",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test bar ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
+
+ assert_that(
+ p.cargo("test").arg("foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[DOCTEST] foo")
- .with_stdout_contains("test foo ... ok")
- .with_stdout_contains("running 0 tests"));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
}
// Regression test for running cargo-test twice with
fn cargo_test_twice() {
let p = project("test_twice")
.file("Cargo.toml", &basic_lib_manifest("test_twice"))
- .file("src/test_twice.rs", r#"
+ .file(
+ "src/test_twice.rs",
+ r#"
#![crate_type = "rlib"]
#[test]
fn dummy_test() { }
- "#)
+ "#,
+ )
.build();
p.cargo("build");
for _ in 0..2 {
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
}
#[test]
fn lib_bin_same_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
name = "foo"
[[bin]]
name = "foo"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#[test] fn lib_test() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
#[allow(unused_extern_crates)]
extern crate foo;
#[test]
fn bin_test() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr(format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[DOCTEST] foo", p.url()))
- .with_stdout_contains_n("test [..] ... ok", 2)
- .with_stdout_contains("running 0 tests"));
+[DOCTEST] foo",
+ p.url()
+ ))
+ .with_stdout_contains_n("test [..] ... ok", 2)
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn lib_with_standard_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
/// ```
/// syntax::foo();
/// ```
#[test]
fn foo_test() {}
- ")
- .file("tests/test.rs", "
+ ",
+ )
+ .file(
+ "tests/test.rs",
+ "
extern crate syntax;
#[test]
fn test() { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]
-[DOCTEST] syntax", dir = p.url()))
- .with_stdout_contains("test foo_test ... ok")
- .with_stdout_contains("test test ... ok")
- .with_stdout_contains_n("test [..] ... ok", 3));
+[DOCTEST] syntax",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test foo_test ... ok")
+ .with_stdout_contains("test test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3),
+ );
}
#[test]
fn lib_with_standard_name2() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
name = "syntax"
test = false
doctest = false
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test test ... ok"));
+[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
}
#[test]
fn lib_without_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
[lib]
test = false
doctest = false
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", dir = p.url()))
- .with_stdout_contains("test test ... ok"));
+[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test test ... ok"),
+ );
}
#[test]
fn bin_without_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
[[bin]]
path = "src/main.rs"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- binary target bin.name is required"));
+ binary target bin.name is required",
+ ),
+ );
}
#[test]
fn bench_without_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
[[bench]]
path = "src/bench.rs"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
- ")
- .file("src/bench.rs", "
+ ",
+ )
+ .file(
+ "src/bench.rs",
+ "
#![feature(test)]
extern crate syntax;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- benchmark target bench.name is required"));
+ benchmark target bench.name is required",
+ ),
+ );
}
#[test]
fn test_without_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
[[test]]
path = "src/test.rs"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() {}
pub fn get_hello() -> &'static str { "Hello" }
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
- ")
- .file("src/test.rs", r#"
+ ",
+ )
+ .file(
+ "src/test.rs",
+ r#"
extern crate syntax;
#[test]
fn external_test() { assert_eq!(syntax::get_hello(), "Hello") }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- test target test.name is required"));
+ test target test.name is required",
+ ),
+ );
}
#[test]
fn example_without_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "syntax"
version = "0.0.1"
[[example]]
path = "examples/example.rs"
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
pub fn foo() {}
- ")
- .file("src/main.rs", "
+ ",
+ )
+ .file(
+ "src/main.rs",
+ "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
- ")
- .file("examples/example.rs", r#"
+ ",
+ )
+ .file(
+ "examples/example.rs",
+ r#"
extern crate syntax;
fn main() {
println!("example1");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] failed to parse manifest at `[..]`
Caused by:
- example target example.name is required"));
+ example target example.name is required",
+ ),
+ );
}
#[test]
fn bin_there_for_integration() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/main.rs", "
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ "
fn main() { std::process::exit(101); }
#[test] fn main_test() {}
- ")
- .file("tests/foo.rs", r#"
+ ",
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
use std::process::Command;
#[test]
fn test_test() {
let status = Command::new("target/debug/foo").status().unwrap();
assert_eq!(status.code(), Some(101));
}
- "#)
+ "#,
+ )
.build();
let output = p.cargo("test").arg("-v").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
- assert!(output.contains("main_test ... ok"), "no main_test\n{}", output);
- assert!(output.contains("test_test ... ok"), "no test_test\n{}", output);
+ assert!(
+ output.contains("main_test ... ok"),
+ "no main_test\n{}",
+ output
+ );
+ assert!(
+ output.contains("test_test ... ok"),
+ "no test_test\n{}",
+ output
+ );
}
#[test]
fn test_dylib() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.bar]
path = "bar"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar as the_bar;
pub fn bar() { the_bar::baz(); }
#[test]
fn foo() { bar(); }
- "#)
- .file("tests/test.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test.rs",
+ r#"
extern crate foo as the_foo;
#[test]
fn foo() { the_foo::bar(); }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[lib]
name = "bar"
crate_type = ["dylib"]
- "#)
- .file("bar/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ "
pub fn baz() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] bar v0.0.1 ({dir}/bar)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]", dir = p.url()))
- .with_stdout_contains_n("test foo ... ok", 2));
+[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("test foo ... ok", 2),
+ );
p.root().move_into_the_past();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]")
- .with_stdout_contains_n("test foo ... ok", 2));
+[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]",
+ )
+ .with_stdout_contains_n("test foo ... ok", 2),
+ );
}
#[test]
fn test_twice_with_build_cmd() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("build.rs", "fn main() {}")
- .file("src/lib.rs", "
+ .file(
+ "src/lib.rs",
+ "
#[test]
fn foo() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[DOCTEST] foo", dir = p.url()))
- .with_stdout_contains("test foo ... ok")
- .with_stdout_contains("running 0 tests"));
-
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr("\
+[DOCTEST] foo",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
+
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[DOCTEST] foo")
- .with_stdout_contains("test foo ... ok")
- .with_stdout_contains("running 0 tests"));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn test_then_build() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#[test]
fn foo() {}
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[DOCTEST] foo", dir = p.url()))
- .with_stdout_contains("test foo ... ok")
- .with_stdout_contains("running 0 tests"));
+[DOCTEST] foo",
+ dir = p.url()
+ ))
+ .with_stdout_contains("test foo ... ok")
+ .with_stdout_contains("running 0 tests"),
+ );
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stdout(""));
+ assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
}
#[test]
fn test_no_run() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", "
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "
#[test]
fn foo() { panic!() }
- ")
+ ",
+ )
.build();
- assert_that(p.cargo("test").arg("--no-run"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--no-run"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn test_run_specific_bin_target() {
let prj = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name="bin2"
path="src/bin2.rs"
- "#)
+ "#,
+ )
.file("src/bin1.rs", "#[test] fn test1() { }")
.file("src/bin2.rs", "#[test] fn test2() { }")
.build();
- assert_that(prj.cargo("test").arg("--bin").arg("bin2"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ prj.cargo("test").arg("--bin").arg("bin2"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]bin2-[..][EXE]", dir = prj.url()))
- .with_stdout_contains("test test2 ... ok"));
+[RUNNING] target[/]debug[/]deps[/]bin2-[..][EXE]",
+ dir = prj.url()
+ ))
+ .with_stdout_contains("test test2 ... ok"),
+ );
}
#[test]
fn test_run_implicit_bin_target() {
let prj = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name="mybin"
path="src/mybin.rs"
- "#)
- .file("src/mybin.rs", "#[test] fn test_in_bin() { }
- fn main() { panic!(\"Don't execute me!\"); }")
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
.file("tests/mytest.rs", "#[test] fn test_in_test() { }")
.file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
- .file("examples/myexm.rs", "#[test] fn test_in_exm() { }
- fn main() { panic!(\"Don't execute me!\"); }")
- .build();
-
- assert_that(prj.cargo("test").arg("--bins"),
- execs().with_status(0)
- .with_stderr(format!("\
+ .file(
+ "examples/myexm.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .build();
+
+ assert_that(
+ prj.cargo("test").arg("--bins"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]", dir = prj.url()))
- .with_stdout_contains("test test_in_bin ... ok"));
+[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]",
+ dir = prj.url()
+ ))
+ .with_stdout_contains("test test_in_bin ... ok"),
+ );
}
#[test]
fn test_run_specific_test_target() {
let prj = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/a.rs", "fn main() { }")
.file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }")
.file("tests/a.rs", "#[test] fn test_a() { }")
.file("tests/b.rs", "#[test] fn test_b() { }")
.build();
- assert_that(prj.cargo("test").arg("--test").arg("b"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ prj.cargo("test").arg("--test").arg("b"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]", dir = prj.url()))
- .with_stdout_contains("test test_b ... ok"));
+[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]",
+ dir = prj.url()
+ ))
+ .with_stdout_contains("test test_b ... ok"),
+ );
}
#[test]
fn test_run_implicit_test_target() {
let prj = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name="mybin"
path="src/mybin.rs"
- "#)
- .file("src/mybin.rs", "#[test] fn test_in_bin() { }
- fn main() { panic!(\"Don't execute me!\"); }")
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
.file("tests/mytest.rs", "#[test] fn test_in_test() { }")
.file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
- .file("examples/myexm.rs", "#[test] fn test_in_exm() { }
- fn main() { panic!(\"Don't execute me!\"); }")
- .build();
-
- assert_that(prj.cargo("test").arg("--tests"),
- execs().with_status(0)
- .with_stderr(format!("\
+ .file(
+ "examples/myexm.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .build();
+
+ assert_that(
+ prj.cargo("test").arg("--tests"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]mytest-[..][EXE]
-[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]", dir = prj.url()))
- .with_stdout_contains("test test_in_test ... ok"));
+[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]",
+ dir = prj.url()
+ ))
+ .with_stdout_contains("test test_in_test ... ok"),
+ );
}
#[test]
fn test_run_implicit_bench_target() {
let prj = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name="mybin"
path="src/mybin.rs"
- "#)
- .file("src/mybin.rs", "#[test] fn test_in_bin() { }
- fn main() { panic!(\"Don't execute me!\"); }")
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
.file("tests/mytest.rs", "#[test] fn test_in_test() { }")
.file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
- .file("examples/myexm.rs", "#[test] fn test_in_exm() { }
- fn main() { panic!(\"Don't execute me!\"); }")
- .build();
-
- assert_that(prj.cargo("test").arg("--benches"),
- execs().with_status(0)
- .with_stderr(format!("\
+ .file(
+ "examples/myexm.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
+ .build();
+
+ assert_that(
+ prj.cargo("test").arg("--benches"),
+ execs()
+ .with_status(0)
+ .with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]mybench-[..][EXE]", dir = prj.url()))
- .with_stdout_contains("test test_in_bench ... ok"));
+[RUNNING] target[/]debug[/]deps[/]mybench-[..][EXE]",
+ dir = prj.url()
+ ))
+ .with_stdout_contains("test test_in_bench ... ok"),
+ );
}
#[test]
fn test_run_implicit_example_target() {
let prj = project("foo")
- .file("Cargo.toml" , r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[[bin]]
name="mybin"
path="src/mybin.rs"
- "#)
- .file("src/mybin.rs", "#[test] fn test_in_bin() { }
- fn main() { panic!(\"Don't execute me!\"); }")
+ "#,
+ )
+ .file(
+ "src/mybin.rs",
+ "#[test] fn test_in_bin() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
.file("tests/mytest.rs", "#[test] fn test_in_test() { }")
.file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
- .file("examples/myexm.rs", "#[test] fn test_in_exm() { }
- fn main() { panic!(\"Don't execute me!\"); }")
+ .file(
+ "examples/myexm.rs",
+ "#[test] fn test_in_exm() { }
+ fn main() { panic!(\"Don't execute me!\"); }",
+ )
.build();
- assert_that(prj.cargo("test").arg("--examples"),
- execs().with_status(0)
- .with_stderr(format!("\
+ assert_that(
+ prj.cargo("test").arg("--examples"),
+ execs().with_status(0).with_stderr(format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]", dir = prj.url())));
+[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]",
+ dir = prj.url()
+ )),
+ );
}
#[test]
fn test_no_harness() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
name = "bar"
path = "foo.rs"
harness = false
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("foo.rs", "fn main() {}")
.build();
- assert_that(p.cargo("test").arg("--").arg("--nocapture"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--").arg("--nocapture"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]bar-[..][EXE]
",
- dir = p.url())));
+ dir = p.url()
+ )),
+ );
}
#[test]
fn selective_testing() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
doctest = false
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[lib]
name = "d1"
doctest = false
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
- .file("d1/src/main.rs", "#[allow(unused_extern_crates)] extern crate d1; fn main() {}")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d1/src/main.rs",
+ "#[allow(unused_extern_crates)] extern crate d1; fn main() {}",
+ )
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
[lib]
name = "d2"
doctest = false
- "#)
+ "#,
+ )
.file("d2/src/lib.rs", "")
- .file("d2/src/main.rs", "#[allow(unused_extern_crates)] extern crate d2; fn main() {}");
+ .file(
+ "d2/src/main.rs",
+ "#[allow(unused_extern_crates)] extern crate d2; fn main() {}",
+ );
let p = p.build();
println!("d1");
- assert_that(p.cargo("test").arg("-p").arg("d1"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("-p").arg("d1"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] d1 v0.0.1 ({dir}/d1)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]", dir = p.url()))
- .with_stdout_contains_n("running 0 tests", 2));
+[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("running 0 tests", 2),
+ );
println!("d2");
- assert_that(p.cargo("test").arg("-p").arg("d2"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("-p").arg("d2"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] d2 v0.0.1 ({dir}/d2)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]", dir = p.url()))
- .with_stdout_contains_n("running 0 tests", 2));
+[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("running 0 tests", 2),
+ );
println!("whole");
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", dir = p.url()))
- .with_stdout_contains("running 0 tests"));
+[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]",
+ dir = p.url()
+ ))
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn almost_cyclic_but_not_quite() {
let p = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
path = "b"
[dev-dependencies.c]
path = "c"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(test)] extern crate b;
#[cfg(test)] extern crate c;
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
[dependencies.a]
path = ".."
- "#)
- .file("b/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate a;
- "#)
- .file("c/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "c/Cargo.toml",
+ r#"
[package]
name = "c"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("c/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
#[test]
fn build_then_selective_test() {
let p = project("a")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
[dependencies.b]
path = "b"
- "#)
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate b;")
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate b;",
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate b;
#[allow(unused_extern_crates)]
extern crate a;
fn main() {}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
p.root().move_into_the_past();
- assert_that(p.cargo("test").arg("-p").arg("b"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-p").arg("b"), execs().with_status(0));
}
#[test]
fn example_dev_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.0.1"
[dev-dependencies.bar]
path = "bar"
- "#)
- .file("src/lib.rs", r#"
- "#)
- .file("examples/e1.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+ "#,
+ )
+ .file(
+ "examples/e1.rs",
+ r#"
extern crate bar;
fn main() { }
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
// make sure this file takes awhile to compile
macro_rules! f0( () => (1) );
macro_rules! f1( () => ({(f0!()) + (f0!())}) );
pub fn bar() {
f8!();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0));
- assert_that(p.cargo("run")
- .arg("--example").arg("e1").arg("--release").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
+ assert_that(
+ p.cargo("run")
+ .arg("--example")
+ .arg("e1")
+ .arg("--release")
+ .arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn selective_testing_with_docs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies.d1]
path = "d1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// not valid rust
/// ```
pub fn foo() {}
- "#)
- .file("d1/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[lib]
name = "d1"
path = "d1.rs"
- "#)
+ "#,
+ )
.file("d1/d1.rs", "");
let p = p.build();
- assert_that(p.cargo("test").arg("-p").arg("d1"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("-p").arg("d1"),
+ execs()
+ .with_status(0)
+ .with_stderr(&format!(
+ "\
[COMPILING] d1 v0.0.1 ({dir}/d1)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]d1[..][EXE]
-[DOCTEST] d1", dir = p.url()))
- .with_stdout_contains_n("running 0 tests", 2));
+[DOCTEST] d1",
+ dir = p.url()
+ ))
+ .with_stdout_contains_n("running 0 tests", 2),
+ );
}
#[test]
fn example_bin_same_name() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
.file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
.build();
- assert_that(p.cargo("test").arg("--no-run").arg("-v"),
- execs().with_status(0)
- .with_stderr(&format!("\
+ assert_that(
+ p.cargo("test").arg("--no-run").arg("-v"),
+ execs().with_status(0).with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", dir = p.url())));
+",
+ dir = p.url()
+ )),
+ );
assert_that(&p.bin("foo"), is_not(existing_file()));
assert_that(&p.bin("examples/foo"), existing_file());
- assert_that(p.process(&p.bin("examples/foo")),
- execs().with_status(0).with_stdout("example\n"));
-
- assert_that(p.cargo("run"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.process(&p.bin("examples/foo")),
+ execs().with_status(0).with_stdout("example\n"),
+ );
+
+ assert_that(
+ p.cargo("run"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] [..]")
- .with_stdout("\
+[RUNNING] [..]",
+ )
+ .with_stdout(
+ "\
bin
-"));
+",
+ ),
+ );
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn test_with_example_twice() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
.file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
.build();
println!("first");
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
assert_that(&p.bin("examples/foo"), existing_file());
println!("second");
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
assert_that(&p.bin("examples/foo"), existing_file());
}
#[test]
fn example_with_dev_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dev-dependencies.a]
path = "a"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("examples/ex.rs", "#[allow(unused_extern_crates)] extern crate a; fn main() {}")
- .file("a/Cargo.toml", r#"
+ .file(
+ "examples/ex.rs",
+ "#[allow(unused_extern_crates)] extern crate a; fn main() {}",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs().with_status(0).with_stderr(
+ "\
[..]
[..]
[..]
[..]
[RUNNING] `rustc --crate-name ex [..] --extern a=[..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn bin_is_preserved() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.build();
- assert_that(p.cargo("build").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
println!("testing");
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn bad_example() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("run").arg("--example").arg("foo"),
- execs().with_status(101).with_stderr("\
+ assert_that(
+ p.cargo("run").arg("--example").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no example target named `foo`
-"));
- assert_that(p.cargo("run").arg("--bin").arg("foo"),
- execs().with_status(101).with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("run").arg("--bin").arg("foo"),
+ execs().with_status(101).with_stderr(
+ "\
[ERROR] no bin target named `foo`
-"));
+",
+ ),
+ );
}
#[test]
fn doctest_feature() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[features]
bar = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```rust
/// assert_eq!(foo::foo(), 1);
/// ```
#[cfg(feature = "bar")]
pub fn foo() -> i32 { 1 }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("--features").arg("bar"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test").arg("--features").arg("bar"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo [..]
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]
-[DOCTEST] foo")
- .with_stdout_contains("running 0 tests")
- .with_stdout_contains("test [..] ... ok"));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains("test [..] ... ok"),
+ );
}
#[test]
fn dashes_to_underscores() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo-bar"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// assert_eq!(foo_bar::foo(), 1);
/// ```
pub fn foo() -> i32 { 1 }
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
#[test]
fn doctest_dev_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dev-dependencies]
b = { path = "b" }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// extern crate b;
/// ```
pub fn foo() {}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[package]
name = "b"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("b/src/lib.rs", "")
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
#[test]
fn filter_no_doc_tests() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// extern crate b;
/// ```
pub fn foo() {}
- "#)
+ "#,
+ )
.file("tests/foo.rs", "")
.build();
- assert_that(p.cargo("test").arg("--test=foo"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("test").arg("--test=foo"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]")
- .with_stdout_contains("running 0 tests"));
+[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]",
+ )
+ .with_stdout_contains("running 0 tests"),
+ );
}
#[test]
fn dylib_doctest() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
name = "foo"
crate-type = ["rlib", "dylib"]
test = false
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// foo::foo();
/// ```
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[DOCTEST] foo")
- .with_stdout_contains("test [..] ... ok"));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test [..] ... ok"),
+ );
}
#[test]
fn dylib_doctest2() {
// can't doctest dylibs as they're statically linked together
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
name = "foo"
crate-type = ["dylib"]
test = false
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// foo::foo();
/// ```
pub fn foo() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stdout(""));
+ assert_that(p.cargo("test"), execs().with_status(0).with_stdout(""));
}
#[test]
fn cyclic_dev_dep_doc_test() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dev-dependencies]
bar = { path = "bar" }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
//! ```
//! extern crate bar;
//! ```
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = { path = ".." }
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[allow(unused_extern_crates)]
extern crate foo;
- "#)
- .build();
- assert_that(p.cargo("test"),
- execs().with_status(0).with_stderr("\
+ "#,
+ )
+ .build();
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]
-[DOCTEST] foo")
- .with_stdout_contains("running 0 tests")
- .with_stdout_contains("test [..] ... ok"));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stdout_contains("test [..] ... ok"),
+ );
}
#[test]
fn dev_dep_with_build_script() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dev-dependencies]
bar = { path = "bar" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("examples/foo.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
build = "build.rs"
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.file("bar/build.rs", "fn main() {}")
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0));
+ assert_that(p.cargo("test"), execs().with_status(0));
}
#[test]
fn no_fail_fast() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn add_one(x: i32) -> i32{
x + 1
}
pub fn sub_one(x: i32) -> i32{
x - 1
}
- "#)
- .file("tests/test_add_one.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test_add_one.rs",
+ r#"
extern crate foo;
use foo::*;
fn fail_add_one_test() {
assert_eq!(add_one(1), 1);
}
- "#)
- .file("tests/test_sub_one.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/test_sub_one.rs",
+ r#"
extern crate foo;
use foo::*;
fn sub_one_test() {
assert_eq!(sub_one(1), 0);
}
- "#)
- .build();
- assert_that(p.cargo("test").arg("--no-fail-fast"),
- execs().with_status(101)
- .with_stderr_contains("\
+ "#,
+ )
+ .build();
+ assert_that(
+ p.cargo("test").arg("--no-fail-fast"),
+ execs()
+ .with_status(101)
+ .with_stderr_contains(
+ "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]test_add_one-[..][EXE]")
- .with_stdout_contains("running 0 tests")
- .with_stderr_contains("\
+[RUNNING] target[/]debug[/]deps[/]test_add_one-[..][EXE]",
+ )
+ .with_stdout_contains("running 0 tests")
+ .with_stderr_contains(
+ "\
[RUNNING] target[/]debug[/]deps[/]test_sub_one-[..][EXE]
-[DOCTEST] foo")
- .with_stdout_contains("test result: FAILED. [..]")
- .with_stdout_contains("test sub_one_test ... ok")
- .with_stdout_contains_n("test [..] ... ok", 3));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test result: FAILED. [..]")
+ .with_stdout_contains("test sub_one_test ... ok")
+ .with_stdout_contains_n("test [..] ... ok", 3),
+ );
}
#[test]
fn test_multiple_packages() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
doctest = false
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("d1/Cargo.toml", r#"
+ .file(
+ "d1/Cargo.toml",
+ r#"
[package]
name = "d1"
version = "0.0.1"
[lib]
name = "d1"
doctest = false
- "#)
+ "#,
+ )
.file("d1/src/lib.rs", "")
- .file("d2/Cargo.toml", r#"
+ .file(
+ "d2/Cargo.toml",
+ r#"
[package]
name = "d2"
version = "0.0.1"
[lib]
name = "d2"
doctest = false
- "#)
+ "#,
+ )
.file("d2/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"),
- execs().with_status(0)
- .with_stderr_contains("\
-[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]")
- .with_stderr_contains("\
-[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]")
- .with_stdout_contains_n("running 0 tests", 2));
+ assert_that(
+ p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"),
+ execs()
+ .with_status(0)
+            .with_stderr_contains(
+                "\
+[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]",
+            )
+            .with_stderr_contains(
+                "\
+[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]",
+            )
+ .with_stdout_contains_n("running 0 tests", 2),
+ );
}
#[test]
fn bin_does_not_rebuild_tests() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/foo.rs", "");
let p = p.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
sleep_ms(1000);
- File::create(&p.root().join("src/main.rs")).unwrap()
- .write_all(b"fn main() { 3; }").unwrap();
-
- assert_that(p.cargo("test").arg("-v").arg("--no-run"),
- execs().with_status(0)
- .with_stderr("\
+ File::create(&p.root().join("src/main.rs"))
+ .unwrap()
+ .write_all(b"fn main() { 3; }")
+ .unwrap();
+
+ assert_that(
+ p.cargo("test").arg("-v").arg("--no-run"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] src[/]main.rs [..]`
[RUNNING] `rustc [..] src[/]main.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn selective_test_wonky_profile() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = { path = "a" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("test").arg("-v").arg("--no-run").arg("--release")
- .arg("-p").arg("foo").arg("-p").arg("a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test")
+ .arg("-v")
+ .arg("--no-run")
+ .arg("--release")
+ .arg("-p")
+ .arg("foo")
+ .arg("-p")
+ .arg("a"),
+ execs().with_status(0),
+ );
}
#[test]
fn selective_test_optional_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
a = { path = "a", optional = true }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("test").arg("-v").arg("--no-run")
- .arg("--features").arg("a").arg("-p").arg("a"),
- execs().with_status(0).with_stderr("\
+ assert_that(
+ p.cargo("test")
+ .arg("-v")
+ .arg("--no-run")
+ .arg("--features")
+ .arg("a")
+ .arg("-p")
+ .arg("a"),
+ execs().with_status(0).with_stderr(
+ "\
[COMPILING] a v0.0.1 ([..])
[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]`
[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn only_test_docs() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[test]
fn foo() {
let a: u32 = "hello";
/// ```
pub fn bar() {
}
- "#)
+ "#,
+ )
.file("tests/foo.rs", "this is not rust");
let p = p.build();
- assert_that(p.cargo("test").arg("--doc"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("test").arg("--doc"),
+ execs()
+ .with_status(0)
+ .with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[DOCTEST] foo")
- .with_stdout_contains("test [..] ... ok"));
+[DOCTEST] foo",
+ )
+ .with_stdout_contains("test [..] ... ok"),
+ );
}
#[test]
fn test_panic_abort_with_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[profile.dev]
panic = 'abort'
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
extern crate bar;
#[test]
fn foo() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "")
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
}
#[test]
fn cfg_test_even_with_no_harness() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
harness = false
doctest = false
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(test)]
fn main() {
println!("hello!");
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("-v"),
- execs().with_status(0)
- .with_stdout("hello!\n")
- .with_stderr("\
+ assert_that(
+ p.cargo("test").arg("-v"),
+ execs().with_status(0).with_stdout("hello!\n").with_stderr(
+ "\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]`
-"));
+",
+ ),
+ );
}
#[test]
fn panic_abort_multiple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[profile.release]
panic = 'abort'
- "#)
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;")
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate a;",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("test")
- .arg("--release").arg("-v")
- .arg("-p").arg("foo")
- .arg("-p").arg("a"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test")
+ .arg("--release")
+ .arg("-v")
+ .arg("-p")
+ .arg("foo")
+ .arg("-p")
+ .arg("a"),
+ execs().with_status(0),
+ );
}
#[test]
fn pass_correct_cfgs_flags_to_rustdoc() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies.feature_a]
path = "libs/feature_a"
default-features = false
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[cfg(test)]
mod tests {
#[test]
assert!(true);
}
}
- "#)
- .file("libs/feature_a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "libs/feature_a/Cargo.toml",
+ r#"
[package]
name = "feature_a"
version = "0.1.0"
[build-dependencies]
mock_serde_codegen = { path = "../mock_serde_codegen", optional = true }
- "#)
- .file("libs/feature_a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "libs/feature_a/src/lib.rs",
+ r#"
#[cfg(feature = "mock_serde_derive")]
const MSG: &'static str = "This is safe";
pub fn get() -> &'static str {
MSG
}
- "#)
- .file("libs/mock_serde_derive/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "libs/mock_serde_derive/Cargo.toml",
+ r#"
[package]
name = "mock_serde_derive"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("libs/mock_serde_derive/src/lib.rs", "")
- .file("libs/mock_serde_codegen/Cargo.toml", r#"
+ .file(
+ "libs/mock_serde_codegen/Cargo.toml",
+ r#"
[package]
name = "mock_serde_codegen"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("libs/mock_serde_codegen/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("test")
- .arg("--package").arg("feature_a")
- .arg("--verbose"),
- execs().with_status(0)
- .with_stderr_contains("\
+ assert_that(
+ p.cargo("test")
+ .arg("--package")
+ .arg("feature_a")
+ .arg("--verbose"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[DOCTEST] feature_a
-[RUNNING] `rustdoc --test [..]mock_serde_codegen[..]`"));
-
- assert_that(p.cargo("test")
- .arg("--verbose"),
- execs().with_status(0)
- .with_stderr_contains("\
+[RUNNING] `rustdoc --test [..]mock_serde_codegen[..]`",
+ ),
+ );
+
+ assert_that(
+ p.cargo("test").arg("--verbose"),
+ execs().with_status(0).with_stderr_contains(
+ "\
[DOCTEST] foo
-[RUNNING] `rustdoc --test [..]feature_a[..]`"));
+[RUNNING] `rustdoc --test [..]feature_a[..]`",
+ ),
+ );
}
#[test]
fn test_release_ignore_panic() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
panic = 'abort'
[profile.release]
panic = 'abort'
- "#)
- .file("src/lib.rs", "#[allow(unused_extern_crates)] extern crate a;")
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ "#[allow(unused_extern_crates)] extern crate a;",
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "");
let p = p.build();
println!("test");
#[test]
fn test_many_with_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
foo = []
[workspace]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.0.1"
authors = []
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "")
.build();
- assert_that(p.cargo("test").arg("-v")
- .arg("-p").arg("a")
- .arg("-p").arg("foo")
- .arg("--features").arg("foo"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test")
+ .arg("-v")
+ .arg("-p")
+ .arg("a")
+ .arg("-p")
+ .arg("foo")
+ .arg("--features")
+ .arg("foo"),
+ execs().with_status(0),
+ );
}
#[test]
fn test_all_workspace() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
#[test]
fn foo_test() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[test]
fn bar_test() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test")
- .arg("--all"),
- execs().with_status(0)
- .with_stdout_contains("test foo_test ... ok")
- .with_stdout_contains("test bar_test ... ok"));
+ assert_that(
+ p.cargo("test").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stdout_contains("test foo_test ... ok")
+ .with_stdout_contains("test bar_test ... ok"),
+ );
}
#[test]
fn test_all_exclude() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
fn main() {}
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
#[test]
pub fn bar() {}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
- "#)
- .file("baz/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "baz/src/lib.rs",
+ r#"
#[test]
pub fn baz() {
assert!(false);
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test")
- .arg("--all")
- .arg("--exclude")
- .arg("baz"),
- execs().with_status(0)
- .with_stdout_contains("running 1 test
-test bar ... ok"));
+ assert_that(
+ p.cargo("test").arg("--all").arg("--exclude").arg("baz"),
+ execs().with_status(0).with_stdout_contains(
+ "running 1 test
+test bar ... ok",
+ ),
+ );
}
#[test]
fn test_all_virtual_manifest() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a", "b"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
#[test]
fn a() {}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.1.0"
- "#)
- .file("b/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
#[test]
fn b() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test")
- .arg("--all"),
- execs().with_status(0)
- .with_stdout_contains("test a ... ok")
- .with_stdout_contains("test b ... ok"));
+ assert_that(
+ p.cargo("test").arg("--all"),
+ execs()
+ .with_status(0)
+ .with_stdout_contains("test a ... ok")
+ .with_stdout_contains("test b ... ok"),
+ );
}
#[test]
fn test_virtual_manifest_all_implied() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a", "b"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
#[test]
fn a() {}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.1.0"
- "#)
- .file("b/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
#[test]
fn b() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test"),
- execs().with_status(0)
- .with_stdout_contains("test a ... ok")
- .with_stdout_contains("test b ... ok"));
+ assert_that(
+ p.cargo("test"),
+ execs()
+ .with_status(0)
+ .with_stdout_contains("test a ... ok")
+ .with_stdout_contains("test b ... ok"),
+ );
}
#[test]
fn test_all_member_dependency_same_name() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
[dependencies]
a = "0.1.0"
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
#[test]
fn a() {}
- "#)
+ "#,
+ )
.build();
Package::new("a", "0.1.0").publish();
- assert_that(p.cargo("test")
- .arg("--all"),
- execs().with_status(0)
- .with_stdout_contains("test a ... ok"));
+ assert_that(
+ p.cargo("test").arg("--all"),
+ execs().with_status(0).with_stdout_contains("test a ... ok"),
+ );
}
#[test]
fn doctest_only_with_dev_dep() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
[dev-dependencies]
b = { path = "b" }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// extern crate b;
///
/// b::b();
/// ```
pub fn a() {}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.1.0"
- "#)
- .file("b/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
pub fn b() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("--doc").arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test").arg("--doc").arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn test_many_targets() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("src/bin/a.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/a.rs",
+ r#"
fn main() {}
#[test] fn bin_a() {}
- "#)
- .file("src/bin/b.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/b.rs",
+ r#"
fn main() {}
#[test] fn bin_b() {}
- "#)
- .file("src/bin/c.rs", r#"
+ "#,
+ )
+ .file(
+ "src/bin/c.rs",
+ r#"
fn main() {}
#[test] fn bin_c() { panic!(); }
- "#)
- .file("examples/a.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/a.rs",
+ r#"
fn main() {}
#[test] fn example_a() {}
- "#)
- .file("examples/b.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/b.rs",
+ r#"
fn main() {}
#[test] fn example_b() {}
- "#)
- .file("examples/c.rs", r#"
+ "#,
+ )
+ .file(
+ "examples/c.rs",
+ r#"
#[test] fn example_c() { panic!(); }
- "#)
- .file("tests/a.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/a.rs",
+ r#"
#[test] fn test_a() {}
- "#)
- .file("tests/b.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/b.rs",
+ r#"
#[test] fn test_b() {}
- "#)
- .file("tests/c.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/c.rs",
+ r#"
does not compile
- "#)
- .build();
-
- assert_that(p.cargo("test").arg("--verbose")
- .arg("--bin").arg("a").arg("--bin").arg("b")
- .arg("--example").arg("a").arg("--example").arg("b")
- .arg("--test").arg("a").arg("--test").arg("b"),
- execs()
- .with_status(0)
- .with_stdout_contains("test bin_a ... ok")
- .with_stdout_contains("test bin_b ... ok")
- .with_stdout_contains("test test_a ... ok")
- .with_stdout_contains("test test_b ... ok")
- .with_stderr_contains("[RUNNING] `rustc --crate-name a examples[/]a.rs [..]`")
- .with_stderr_contains("[RUNNING] `rustc --crate-name b examples[/]b.rs [..]`"))
+ "#,
+ )
+ .build();
+
+ assert_that(
+ p.cargo("test")
+ .arg("--verbose")
+ .arg("--bin")
+ .arg("a")
+ .arg("--bin")
+ .arg("b")
+ .arg("--example")
+ .arg("a")
+ .arg("--example")
+ .arg("b")
+ .arg("--test")
+ .arg("a")
+ .arg("--test")
+ .arg("b"),
+ execs()
+ .with_status(0)
+ .with_stdout_contains("test bin_a ... ok")
+ .with_stdout_contains("test bin_b ... ok")
+ .with_stdout_contains("test test_a ... ok")
+ .with_stdout_contains("test test_b ... ok")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name a examples[/]a.rs [..]`")
+ .with_stderr_contains("[RUNNING] `rustc --crate-name b examples[/]b.rs [..]`"),
+ )
}
#[test]
fn doctest_and_registry() {
let p = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
c = { path = "c" }
[workspace]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("b/Cargo.toml", r#"
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.1.0"
- "#)
- .file("b/src/lib.rs", "
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ "
/// ```
/// b::foo();
/// ```
pub fn foo() {}
- ")
- .file("c/Cargo.toml", r#"
+ ",
+ )
+ .file(
+ "c/Cargo.toml",
+ r#"
[project]
name = "c"
version = "0.1.0"
[dependencies]
b = "0.1"
- "#)
+ "#,
+ )
.file("c/src/lib.rs", "")
.build();
Package::new("b", "0.1.0").publish();
- assert_that(p.cargo("test").arg("--all").arg("-v"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test").arg("--all").arg("-v"),
+ execs().with_status(0),
+ );
}
#[test]
fn cargo_test_env() {
- let src = format!(r#"
+ let src = format!(
+ r#"
#![crate_type = "rlib"]
#[test]
use std::env;
println!("{{}}", env::var("{}").unwrap());
}}
- "#, cargo::CARGO_ENV);
+ "#,
+ cargo::CARGO_ENV
+ );
let p = project("env_test")
.file("Cargo.toml", &basic_lib_manifest("env_test"))
let mut pr = p.cargo("test");
let cargo = cargo_exe().canonicalize().unwrap();
- assert_that(pr.args(&["--lib", "--", "--nocapture"]),
- execs().with_status(0)
- .with_stdout_contains(format!("\
+ assert_that(
+ pr.args(&["--lib", "--", "--nocapture"]),
+ execs().with_status(0).with_stdout_contains(format!(
+ "\
{}
test env_test ... ok
-", cargo.to_str().unwrap())));
+",
+ cargo.to_str().unwrap()
+ )),
+ );
}
#[test]
fn test_order() {
- let p = project("foo")
- .file("Cargo.toml", r#"
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[test] fn test_lib() {}
- "#)
- .file("tests/a.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/a.rs",
+ r#"
#[test] fn test_a() {}
- "#)
- .file("tests/z.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/z.rs",
+ r#"
#[test] fn test_z() {}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("--all"),
- execs().with_status(0)
- .with_stdout_contains("
+ assert_that(
+ p.cargo("test").arg("--all"),
+ execs().with_status(0).with_stdout_contains(
+ "
running 1 test
test test_lib ... ok
test test_z ... ok
test result: ok. [..]
-"));
-
+",
+ ),
+ );
}
#[test]
fn cyclic_dev() {
- let p = project("foo")
- .file("Cargo.toml", r#"
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dev-dependencies]
foo = { path = "." }
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
#[test] fn test_lib() {}
- "#)
- .file("tests/foo.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/foo.rs",
+ r#"
extern crate foo;
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test").arg("--all"),
- execs().with_status(0));
+ assert_that(p.cargo("test").arg("--all"), execs().with_status(0));
}
#[test]
.publish();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
testless = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("test"), execs().with_status(0));
- assert_that(p.cargo("test").arg("--package").arg("testless"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("test").arg("--package").arg("testless"),
+ execs().with_status(0),
+ );
}
#[test]
fn find_dependency_of_proc_macro_dependency_with_target() {
let workspace = project("workspace")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["root", "proc_macro_dep"]
- "#)
- .file("root/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "root/Cargo.toml",
+ r#"
[project]
name = "root"
version = "0.1.0"
[dependencies]
proc_macro_dep = { path = "../proc_macro_dep" }
- "#)
- .file("root/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "root/src/lib.rs",
+ r#"
#[macro_use]
extern crate proc_macro_dep;
#[derive(Noop)]
pub struct X;
- "#)
- .file("proc_macro_dep/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "proc_macro_dep/Cargo.toml",
+ r#"
[project]
name = "proc_macro_dep"
version = "0.1.0"
[dependencies]
bar = "^0.1"
- "#)
- .file("proc_macro_dep/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "proc_macro_dep/src/lib.rs",
+ r#"
extern crate bar;
extern crate proc_macro;
use proc_macro::TokenStream;
pub fn noop(_input: TokenStream) -> TokenStream {
"".parse().unwrap()
}
- "#)
+ "#,
+ )
.build();
Package::new("foo", "0.1.0").publish();
Package::new("bar", "0.1.0")
.dep("foo", "0.1")
.file("src/lib.rs", "extern crate foo;")
.publish();
- assert_that(workspace.cargo("test").arg("--all").arg("--target").arg(rustc_host()),
- execs().with_status(0));
+ assert_that(
+ workspace
+ .cargo("test")
+ .arg("--all")
+ .arg("--target")
+ .arg(rustc_host()),
+ execs().with_status(0),
+ );
}
#[test]
fn test_hint_not_masked_by_doctest() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
- .file("src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
/// ```
/// assert_eq!(1, 1);
/// ```
pub fn this_works() {}
- "#)
- .file("tests/integ.rs", r#"
+ "#,
+ )
+ .file(
+ "tests/integ.rs",
+ r#"
#[test]
fn this_fails() {
panic!();
}
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("test")
- .arg("--no-fail-fast"),
- execs()
- .with_status(101)
- .with_stdout_contains("test this_fails ... FAILED")
- .with_stdout_contains("[..]this_works (line [..]ok")
- .with_stderr_contains("[ERROR] test failed, to rerun pass \
- '--test integ'"));
+ assert_that(
+ p.cargo("test").arg("--no-fail-fast"),
+ execs()
+ .with_status(101)
+ .with_stdout_contains("test this_fails ... FAILED")
+ .with_stdout_contains("[..]this_works (line [..]ok")
+ .with_stderr_contains(
+ "[ERROR] test failed, to rerun pass \
+ '--test integ'",
+ ),
+ );
}
#[test]
fn test_hint_workspace() {
- let workspace = project("workspace")
- .file("Cargo.toml", r#"
+ let workspace = project("workspace")
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a", "b"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
- "#)
- .file("a/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "a/src/lib.rs",
+ r#"
#[test]
fn t1() {}
- "#)
- .file("b/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "b/Cargo.toml",
+ r#"
[project]
name = "b"
version = "0.1.0"
- "#)
- .file("b/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "b/src/lib.rs",
+ r#"
#[test]
fn t1() {assert!(false)}
- "#)
+ "#,
+ )
.build();
- assert_that(workspace.cargo("test"),
- execs().with_stderr_contains(
- "[ERROR] test failed, to rerun pass '-p b --lib'")
- .with_status(101));
+ assert_that(
+ workspace.cargo("test"),
+ execs()
+ .with_stderr_contains("[ERROR] test failed, to rerun pass '-p b --lib'")
+ .with_status(101),
+ );
}
use cargotest::rustc_host;
-use cargotest::support::{path2url, project, execs};
+use cargotest::support::{execs, project, path2url};
use hamcrest::assert_that;
#[test]
let target = rustc_host();
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}]
ar = "nonexistent-ar"
linker = "nonexistent-linker"
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(foo.cargo("build").arg("--verbose"),
- execs().with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build").arg("--verbose"),
+ execs().with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url())))
+",
+ url = foo.url()
+ )),
+ )
}
#[test]
// Escaped as they appear within a TOML config file
let config = if cfg!(windows) {
- (r#"C:\\bogus\\nonexistent-ar"#, r#"C:\\bogus\\nonexistent-linker"#)
+ (
+ r#"C:\\bogus\\nonexistent-ar"#,
+ r#"C:\\bogus\\nonexistent-linker"#,
+ )
} else {
(r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#)
};
let foo = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{target}]
ar = "{ar}"
linker = "{linker}"
- "#, target = target, ar = config.0, linker = config.1))
+ "#,
+ target = target,
+ ar = config.0,
+ linker = config.1
+ ),
+ )
.build();
let output = if cfg!(windows) {
- (r#"C:\bogus\nonexistent-ar"#, r#"C:\bogus\nonexistent-linker"#)
+ (
+ r#"C:\bogus\nonexistent-ar"#,
+ r#"C:\bogus\nonexistent-linker"#,
+ )
} else {
(r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#)
};
- assert_that(foo.cargo("build").arg("--verbose"),
- execs().with_stderr(&format!("\
+ assert_that(
+ foo.cargo("build").arg("--verbose"),
+ execs().with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc [..] -C ar={ar} -C linker={linker} [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo.url(), ar = output.0, linker = output.1)))
+",
+ url = foo.url(),
+ ar = output.0,
+ linker = output.1
+ )),
+ )
}
#[test]
// Funky directory structure to test that relative tool paths are made absolute
// by reference to the `.cargo/..` directory and not to (for example) the CWD.
let origin = project("origin")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
name = "foo"
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{target}]
ar = "{ar}"
linker = "{linker}"
- "#, target = target, ar = config.0, linker = config.1))
+ "#,
+ target = target,
+ ar = config.0,
+ linker = config.1
+ ),
+ )
.build();
let foo_path = origin.root().join("foo");
let foo_url = path2url(foo_path.clone());
let prefix = origin.root().into_os_string().into_string().unwrap();
let output = if cfg!(windows) {
- (format!(r#"{}\.\nonexistent-ar"#, prefix),
- format!(r#"{}\.\tools\nonexistent-linker"#, prefix))
+ (
+ format!(r#"{}\.\nonexistent-ar"#, prefix),
+ format!(r#"{}\.\tools\nonexistent-linker"#, prefix),
+ )
} else {
- (format!(r#"{}/./nonexistent-ar"#, prefix),
- format!(r#"{}/./tools/nonexistent-linker"#, prefix))
+ (
+ format!(r#"{}/./nonexistent-ar"#, prefix),
+ format!(r#"{}/./tools/nonexistent-linker"#, prefix),
+ )
};
- assert_that(origin.cargo("build").cwd(foo_path).arg("--verbose"),
- execs().with_stderr(&format!("\
+ assert_that(
+ origin.cargo("build").cwd(foo_path).arg("--verbose"),
+ execs().with_stderr(&format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc [..] -C ar={ar} -C linker={linker} [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", url = foo_url, ar = output.0, linker = output.1)))
+",
+ url = foo_url,
+ ar = output.0,
+ linker = output.1
+ )),
+ )
}
#[test]
let target = rustc_host();
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.0.1"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
.file("tests/test.rs", "")
.file("benches/bench.rs", "")
- .file(".cargo/config", &format!(r#"
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
[target.{}]
runner = "nonexistent-runner -r"
- "#, target))
+ "#,
+ target
+ ),
+ )
.build();
- assert_that(p.cargo("run").args(&["--", "--param"]),
- execs().with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("run").args(&["--", "--param"]),
+ execs().with_stderr_contains(&format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `nonexistent-runner -r target[/]debug[/]foo[EXE] --param`
-", url = p.url())));
+",
+ url = p.url()
+ )),
+ );
- assert_that(p.cargo("test").args(&["--test", "test", "--verbose", "--", "--param"]),
- execs().with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("test")
+ .args(&["--test", "test", "--verbose", "--", "--param"]),
+ execs().with_stderr_contains(&format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `nonexistent-runner -r [..][/]target[/]debug[/]deps[/]test-[..][EXE] --param`
-", url = p.url())));
+",
+ url = p.url()
+ )),
+ );
- assert_that(p.cargo("bench").args(&["--bench", "bench", "--verbose", "--", "--param"]),
- execs().with_stderr_contains(&format!("\
+ assert_that(
+ p.cargo("bench")
+ .args(&["--bench", "bench", "--verbose", "--", "--param"]),
+ execs().with_stderr_contains(&format!(
+ "\
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `nonexistent-runner -r [..][/]target[/]release[/]deps[/]bench-[..][EXE] --param --bench`
-", url = p.url())));
+",
+ url = p.url()
+ )),
+ );
}
-use cargotest::support::{project, execs, main_file, basic_bin_manifest};
-use hamcrest::{assert_that};
+use cargotest::support::{basic_bin_manifest, execs, main_file, project};
+use hamcrest::assert_that;
fn verify_project_success_output() -> String {
r#"{"success":"true"}"#.into()
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("verify-project")
- .arg("--manifest-path").arg("foo/Cargo.toml")
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0)
- .with_stdout(verify_project_success_output()));
+ assert_that(
+ p.cargo("verify-project")
+ .arg("--manifest-path")
+ .arg("foo/Cargo.toml")
+ .cwd(p.root().parent().unwrap()),
+ execs()
+ .with_status(0)
+ .with_stdout(verify_project_success_output()),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("verify-project")
- .arg("--manifest-path").arg(p.root().join("Cargo.toml"))
- .cwd(p.root().parent().unwrap()),
- execs().with_status(0)
- .with_stdout(verify_project_success_output()));
+ assert_that(
+ p.cargo("verify-project")
+ .arg("--manifest-path")
+ .arg(p.root().join("Cargo.toml"))
+ .cwd(p.root().parent().unwrap()),
+ execs()
+ .with_status(0)
+ .with_stdout(verify_project_success_output()),
+ );
}
#[test]
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
- assert_that(p.cargo("verify-project")
- .cwd(p.root()),
- execs().with_status(0)
- .with_stdout(verify_project_success_output()));
+ assert_that(
+ p.cargo("verify-project").cwd(p.root()),
+ execs()
+ .with_status(0)
+ .with_stdout(verify_project_success_output()),
+ );
}
use cargo;
-use cargotest::support::{project, execs};
+use cargotest::support::{execs, project};
use hamcrest::assert_that;
#[test]
fn simple() {
let p = project("foo").build();
- assert_that(p.cargo("version"),
- execs().with_status(0).with_stdout(&format!("{}\n",
- cargo::version())));
-
- assert_that(p.cargo("--version"),
- execs().with_status(0).with_stdout(&format!("{}\n",
- cargo::version())));
-
+ assert_that(
+ p.cargo("version"),
+ execs()
+ .with_status(0)
+ .with_stdout(&format!("{}\n", cargo::version())),
+ );
+
+ assert_that(
+ p.cargo("--version"),
+ execs()
+ .with_status(0)
+ .with_stdout(&format!("{}\n", cargo::version())),
+ );
}
-
#[test]
#[cfg_attr(target_os = "windows", ignore)]
fn version_works_without_rustc() {
let p = project("foo").build();
- assert_that(p.cargo("version").env("PATH", ""),
- execs().with_status(0));
+ assert_that(p.cargo("version").env("PATH", ""), execs().with_status(0));
}
#[test]
let p = project("foo")
.file(".cargo/config", "this is not toml")
.build();
- assert_that(p.cargo("version"),
- execs().with_status(0));
+ assert_that(p.cargo("version"), execs().with_status(0));
}
#[test]
fn version_works_with_bad_target_dir() {
let p = project("foo")
- .file(".cargo/config", r#"
+ .file(
+ ".cargo/config",
+ r#"
[build]
target-dir = 4
- "#)
+ "#,
+ )
.build();
- assert_that(p.cargo("version"),
- execs().with_status(0));
+ assert_that(p.cargo("version"), execs().with_status(0));
}
-use cargotest::support::{project, execs, Project};
+use cargotest::support::{execs, project, Project};
use cargotest::support::registry::Package;
use hamcrest::assert_that;
fn make_lib(lib_src: &str) {
Package::new("foo", "0.0.1")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
build = "build.rs"
- "#)
- .file("build.rs", &format!(r#"
+ "#,
+ )
+ .file(
+ "build.rs",
+ &format!(
+ r#"
fn main() {{
use std::io::Write;
println!("cargo:warning={{}}", "{}");
write!(&mut ::std::io::stderr(), "hidden stderr");
println!("cargo:warning={{}}", "{}");
}}
- "#, WARNING1, WARNING2))
+ "#,
+ WARNING1, WARNING2
+ ),
+ )
.file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src))
.publish();
}
fn make_upstream(main_src: &str) -> Project {
project("bar")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "bar"
version = "0.0.1"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/main.rs", &format!("fn main() {{ {} }}", main_src))
.build()
}
fn no_warning_on_success() {
make_lib("");
let upstream = make_upstream("");
- assert_that(upstream.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ upstream.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.0.1 ([..])
[COMPILING] foo v0.0.1
[COMPILING] bar v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn no_warning_on_bin_failure() {
make_lib("");
let upstream = make_upstream("hi()");
- assert_that(upstream.cargo("build"),
- execs().with_status(101)
- .with_stdout_does_not_contain("hidden stdout")
- .with_stderr_does_not_contain("hidden stderr")
- .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1))
- .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2))
- .with_stderr_contains("[UPDATING] registry `[..]`")
- .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])")
- .with_stderr_contains("[COMPILING] foo v0.0.1")
- .with_stderr_contains("[COMPILING] bar v0.0.1 ([..])"));
+ assert_that(
+ upstream.cargo("build"),
+ execs()
+ .with_status(101)
+ .with_stdout_does_not_contain("hidden stdout")
+ .with_stderr_does_not_contain("hidden stderr")
+ .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1))
+ .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2))
+ .with_stderr_contains("[UPDATING] registry `[..]`")
+ .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])")
+ .with_stderr_contains("[COMPILING] foo v0.0.1")
+ .with_stderr_contains("[COMPILING] bar v0.0.1 ([..])"),
+ );
}
#[test]
fn warning_on_lib_failure() {
make_lib("err()");
let upstream = make_upstream("");
- assert_that(upstream.cargo("build"),
- execs().with_status(101)
- .with_stdout_does_not_contain("hidden stdout")
- .with_stderr_does_not_contain("hidden stderr")
- .with_stderr_does_not_contain("[COMPILING] bar v0.0.1 ([..])")
- .with_stderr_contains("[UPDATING] registry `[..]`")
- .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])")
- .with_stderr_contains("[COMPILING] foo v0.0.1")
- .with_stderr_contains(&format!("[WARNING] {}", WARNING1))
- .with_stderr_contains(&format!("[WARNING] {}", WARNING2)));
+ assert_that(
+ upstream.cargo("build"),
+ execs()
+ .with_status(101)
+ .with_stdout_does_not_contain("hidden stdout")
+ .with_stderr_does_not_contain("hidden stderr")
+ .with_stderr_does_not_contain("[COMPILING] bar v0.0.1 ([..])")
+ .with_stderr_contains("[UPDATING] registry `[..]`")
+ .with_stderr_contains("[DOWNLOADING] foo v0.0.1 ([..])")
+ .with_stderr_contains("[COMPILING] foo v0.0.1")
+ .with_stderr_contains(&format!("[WARNING] {}", WARNING1))
+ .with_stderr_contains(&format!("[WARNING] {}", WARNING2)),
+ );
}
use std::io::{Read, Write};
use cargotest::sleep_ms;
-use cargotest::support::{project, execs, git};
+use cargotest::support::{execs, git, project};
use cargotest::support::registry::Package;
-use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
#[test]
fn simple_explicit() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = ".."
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), is_not(existing_file()));
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
#[test]
fn simple_explicit_default_members() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
default-members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = ".."
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
#[test]
fn inferred_root() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), is_not(existing_file()));
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
#[test]
fn inferred_path_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.file("bar/src/lib.rs", "");
let p = p.build();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), is_not(existing_file()));
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
#[test]
fn transitive_path_dep() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "bar" }
[workspace]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
baz = { path = "../baz" }
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.file("bar/src/lib.rs", "")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("baz/src/main.rs", "fn main() {}")
.file("baz/src/lib.rs", "");
let p = p.build();
assert_that(&p.bin("bar"), is_not(existing_file()));
assert_that(&p.bin("baz"), is_not(existing_file()));
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
assert_that(&p.bin("baz"), is_not(existing_file()));
- assert_that(p.cargo("build").cwd(p.root().join("baz")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("baz")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
assert_that(&p.bin("baz"), existing_file());
#[test]
fn parent_pointer_works() {
let p = project("foo")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
bar = { path = "../bar" }
[workspace]
- "#)
+ "#,
+ )
.file("foo/src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = "../foo"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.file("bar/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("foo")),
- execs().with_status(0));
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/Cargo.lock"), existing_file());
assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
}
#[test]
fn same_names_in_workspace() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
workspace = ".."
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: two packages named `foo` in this workspace:
- [..]Cargo.toml
- [..]Cargo.toml
-"));
+",
+ ),
+ );
}
#[test]
fn parent_doesnt_point_to_child() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
[workspace]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(101).with_stderr(
+ "\
error: current package believes it's in a workspace when it's not:
current: [..]Cargo.toml
workspace: [..]Cargo.toml
this may be fixable [..]
-"));
+",
+ ),
+ );
}
#[test]
fn invalid_parent_pointer() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
workspace = "foo"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to read `[..]Cargo.toml`
Caused by:
[..]
-"));
+",
+ ),
+ );
}
#[test]
fn invalid_members() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["foo"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to read `[..]Cargo.toml`
Caused by:
[..]
-"));
+",
+ ),
+ );
}
#[test]
fn bare_workspace_ok() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
[workspace]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}");
let p = p.build();
#[test]
fn two_roots() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[workspace]
members = [".."]
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: multiple workspace roots found in the same workspace:
[..]
[..]
-"));
+",
+ ),
+ );
}
#[test]
fn workspace_isnt_root() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
workspace = "bar"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: root of a workspace inferred but wasn't a root: [..]
-"));
+",
+ ),
+ );
}
#[test]
fn dangling_member() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = "../baz"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
authors = []
workspace = "../baz"
- "#)
+ "#,
+ )
.file("baz/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: package `[..]` is a member of the wrong workspace
expected: [..]
actual: [..]
-"));
+",
+ ),
+ );
}
#[test]
fn cycle() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
workspace = "bar"
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = ".."
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101));
+ assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
fn share_dependencies() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
dep1 = "< 0.1.5"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
Package::new("dep1", "0.1.3").publish();
Package::new("dep1", "0.1.8").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] dep1 v0.1.3 ([..])
[COMPILING] dep1 v0.1.3
[COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn fetch_fetches_all() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
dep1 = "*"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
Package::new("dep1", "0.1.3").publish();
- assert_that(p.cargo("fetch"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("fetch"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
[DOWNLOADING] dep1 v0.1.3 ([..])
-"));
+",
+ ),
+ );
}
#[test]
fn lock_works_for_everyone() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
dep1 = "0.1"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
Package::new("dep1", "0.1.0").publish();
Package::new("dep2", "0.1.0").publish();
- assert_that(p.cargo("generate-lockfile"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("generate-lockfile"),
+ execs().with_status(0).with_stderr(
+ "\
[UPDATING] registry `[..]`
-"));
+",
+ ),
+ );
Package::new("dep1", "0.1.1").publish();
Package::new("dep2", "0.1.1").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(0).with_stderr(
+ "\
[DOWNLOADING] dep2 v0.1.0 ([..])
[COMPILING] dep2 v0.1.0
[COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
-
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0).with_stderr(
+ "\
[DOWNLOADING] dep1 v0.1.0 ([..])
[COMPILING] dep1 v0.1.0
[COMPILING] bar v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn virtual_works() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar"]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("Cargo.lock"), existing_file());
assert_that(&p.bin("bar"), existing_file());
assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
#[test]
fn explicit_package_argument_works_with_virtual_manifest() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar"]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root()).args(&["--package", "bar"]),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root()).args(&["--package", "bar"]),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("Cargo.lock"), existing_file());
assert_that(&p.bin("bar"), existing_file());
assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
#[test]
fn virtual_misconfigure() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(101).with_stderr(
+ "\
error: current package believes it's in a workspace when it's not:
current: [..]bar[..]Cargo.toml
workspace: [..]Cargo.toml
this may be fixable by adding `bar` to the `workspace.members` array of the \
manifest located at: [..]
-"));
+",
+ ),
+ );
}
#[test]
fn virtual_build_all_implied() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar"]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn virtual_default_members() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar", "baz"]
default-members = ["bar"]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}")
.file("baz/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("bar"), existing_file());
assert_that(&p.bin("baz"), is_not(existing_file()));
}
#[test]
fn virtual_default_member_is_not_a_member() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["bar"]
default-members = ["something-else"]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: package `[..]something-else` is listed in workspace’s default-members \
but is not a member.
-"));
+",
+ ),
+ );
}
#[test]
fn virtual_build_no_members() {
- let p = project("foo")
- .file("Cargo.toml", r#"
+ let p = project("foo").file(
+ "Cargo.toml",
+ r#"
[workspace]
- "#);
+ "#,
+ );
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: manifest path `[..]` contains no package: The manifest is virtual, \
and the workspace has no members.
-"));
+",
+ ),
+ );
}
#[test]
fn include_virtual() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
[workspace]
members = ["bar"]
- "#)
+ "#,
+ )
.file("src/main.rs", "")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[workspace]
- "#);
+ "#,
+ );
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: multiple workspace roots found in the same workspace:
[..]
[..]
-"));
+",
+ ),
+ );
}
#[test]
fn members_include_path_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
p3 = { path = "p3" }
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("p1/Cargo.toml", r#"
+ .file(
+ "p1/Cargo.toml",
+ r#"
[project]
name = "p1"
version = "0.1.0"
[dependencies]
p2 = { path = "../p2" }
- "#)
+ "#,
+ )
.file("p1/src/lib.rs", "")
- .file("p2/Cargo.toml", r#"
+ .file(
+ "p2/Cargo.toml",
+ r#"
[project]
name = "p2"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("p2/src/lib.rs", "")
- .file("p3/Cargo.toml", r#"
+ .file(
+ "p3/Cargo.toml",
+ r#"
[project]
name = "p3"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("p3/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("p1")),
- execs().with_status(0));
- assert_that(p.cargo("build").cwd(p.root().join("p2")),
- execs().with_status(0));
- assert_that(p.cargo("build").cwd(p.root().join("p3")),
- execs().with_status(0));
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("p1")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("p2")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("p3")),
+ execs().with_status(0),
+ );
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.root().join("target"), existing_dir());
assert_that(&p.root().join("p1/target"), is_not(existing_dir()));
#[test]
fn new_warns_you_this_will_not_work() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
[workspace]
- "#)
+ "#,
+ )
.file("src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("new").arg("--lib").arg("bar").env("USER", "foo"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("new").arg("--lib").arg("bar").env("USER", "foo"),
+ execs().with_status(0).with_stderr(
+ "\
warning: compiling this new crate may not work due to invalid workspace \
configuration
this may be fixable by ensuring that this crate is depended on by the workspace \
root: [..]
[CREATED] library `bar` project
-"));
+",
+ ),
+ );
}
#[test]
fn lock_doesnt_change_depending_on_crate() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
foo = "*"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("baz/Cargo.toml", r#"
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.1.0"
[dependencies]
bar = "*"
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", "");
let p = p.build();
Package::new("foo", "1.0.0").publish();
Package::new("bar", "1.0.0").publish();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
let mut lockfile = String::new();
t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile));
- assert_that(p.cargo("build").cwd(p.root().join("baz")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("baz")),
+ execs().with_status(0),
+ );
let mut lockfile2 = String::new();
t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile2));
#[test]
fn rebuild_please() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ['lib', 'bin']
- "#)
- .file("lib/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "lib/Cargo.toml",
+ r#"
[package]
name = "lib"
version = "0.1.0"
- "#)
- .file("lib/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "lib/src/lib.rs",
+ r#"
pub fn foo() -> u32 { 0 }
- "#)
- .file("bin/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bin/Cargo.toml",
+ r#"
[package]
name = "bin"
version = "0.1.0"
[dependencies]
lib = { path = "../lib" }
- "#)
- .file("bin/src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "bin/src/main.rs",
+ r#"
extern crate lib;
fn main() {
assert_eq!(lib::foo(), 0);
}
- "#);
+ "#,
+ );
let p = p.build();
- assert_that(p.cargo("run").cwd(p.root().join("bin")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("run").cwd(p.root().join("bin")),
+ execs().with_status(0),
+ );
sleep_ms(1000);
- t!(t!(File::create(p.root().join("lib/src/lib.rs"))).write_all(br#"
+ t!(t!(File::create(p.root().join("lib/src/lib.rs"))).write_all(
+ br#"
pub fn foo() -> u32 { 1 }
- "#));
-
- assert_that(p.cargo("build").cwd(p.root().join("lib")),
- execs().with_status(0));
-
- assert_that(p.cargo("run").cwd(p.root().join("bin")),
- execs().with_status(101));
+ "#
+ ));
+
+ assert_that(
+ p.cargo("build").cwd(p.root().join("lib")),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ p.cargo("run").cwd(p.root().join("bin")),
+ execs().with_status(101),
+ );
}
#[test]
fn workspace_in_git() {
let git_project = git::new("dep1", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["foo"]
- "#)
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
}).unwrap();
let p = project("foo")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "lib"
version = "0.1.0"
[dependencies.foo]
git = '{}'
- "#, git_project.url()))
- .file("src/lib.rs", r#"
+ "#,
+ git_project.url()
+ ),
+ )
+ .file(
+ "src/lib.rs",
+ r#"
pub fn foo() -> u32 { 0 }
- "#);
+ "#,
+ );
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
}
-
#[test]
fn lockfile_can_specify_nonexistant_members() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["a"]
- "#)
- .file("a/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "a/Cargo.toml",
+ r#"
[project]
name = "a"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("a/src/main.rs", "fn main() {}")
- .file("Cargo.lock", r#"
+ .file(
+ "Cargo.lock",
+ r#"
[[package]]
name = "a"
version = "0.1.0"
[[package]]
name = "b"
version = "0.1.0"
- "#);
+ "#,
+ );
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("a")), execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("a")),
+ execs().with_status(0),
+ );
}
#[test]
fn you_cannot_generate_lockfile_for_empty_workspaces() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
- "#)
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("update"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("update"),
+ execs().with_status(101).with_stderr(
+ "\
error: you can't generate a lockfile for an empty workspace.
-"));
+",
+ ),
+ );
}
#[test]
fn workspace_with_transitive_dev_deps() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.5.0"
path = "bar"
[workspace]
- "#)
+ "#,
+ )
.file("src/main.rs", r#"fn main() {}"#)
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.5.0"
[dev-dependencies.baz]
path = "../baz"
- "#)
- .file("bar/src/lib.rs", r#"
+ "#,
+ )
+ .file(
+ "bar/src/lib.rs",
+ r#"
pub fn init() {}
#[cfg(test)]
extern crate baz;
baz::do_stuff();
}
- "#)
- .file("baz/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "baz/Cargo.toml",
+ r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["mbrubeck@example.com"]
- "#)
+ "#,
+ )
.file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#);
let p = p.build();
- assert_that(p.cargo("test").args(&["-p", "bar"]),
- execs().with_status(0));
+ assert_that(p.cargo("test").args(&["-p", "bar"]), execs().with_status(0));
}
#[test]
fn error_if_parent_cargo_toml_is_invalid() {
let p = project("foo")
.file("Cargo.toml", "Totally not a TOML file")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("bar")),
- execs().with_status(101)
- .with_stderr_contains("\
-[ERROR] failed to parse manifest at `[..]`"));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(101).with_stderr_contains(
+ "\
+ [ERROR] failed to parse manifest at `[..]`",
+ ),
+ );
}
#[test]
fn relative_path_for_member_works() {
let p = project("foo")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["../bar"]
- "#)
+ "#,
+ )
.file("foo/src/main.rs", "fn main() {}")
- .file("bar/Cargo.toml", r#"
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
workspace = "../foo"
- "#)
+ "#,
+ )
.file("bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("foo")), execs().with_status(0));
- assert_that(p.cargo("build").cwd(p.root().join("bar")), execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0),
+ );
}
#[test]
fn relative_path_for_root_works() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
subproj = { path = "./subproj" }
- "#)
+ "#,
+ )
.file("src/main.rs", "fn main() {}")
- .file("subproj/Cargo.toml", r#"
+ .file(
+ "subproj/Cargo.toml",
+ r#"
[project]
name = "subproj"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("subproj/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root())
- .arg("--manifest-path").arg("./Cargo.toml"),
- execs().with_status(0));
-
- assert_that(p.cargo("build").cwd(p.root().join("subproj"))
- .arg("--manifest-path").arg("../Cargo.toml"),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build")
+ .cwd(p.root())
+ .arg("--manifest-path")
+ .arg("./Cargo.toml"),
+ execs().with_status(0),
+ );
+
+ assert_that(
+ p.cargo("build")
+ .cwd(p.root().join("subproj"))
+ .arg("--manifest-path")
+ .arg("../Cargo.toml"),
+ execs().with_status(0),
+ );
}
#[test]
fn path_dep_outside_workspace_is_not_member() {
let p = project("foo")
- .file("ws/Cargo.toml", r#"
+ .file(
+ "ws/Cargo.toml",
+ r#"
[project]
name = "ws"
version = "0.1.0"
foo = { path = "../foo" }
[workspace]
- "#)
+ "#,
+ )
.file("ws/src/lib.rs", r"extern crate foo;")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("ws")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("ws")),
+ execs().with_status(0),
+ );
}
#[test]
fn test_in_and_out_of_workspace() {
let p = project("foo")
- .file("ws/Cargo.toml", r#"
+ .file(
+ "ws/Cargo.toml",
+ r#"
[project]
name = "ws"
version = "0.1.0"
[workspace]
members = [ "../bar" ]
- "#)
- .file("ws/src/lib.rs", r"extern crate foo; pub fn f() { foo::f() }")
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "ws/src/lib.rs",
+ r"extern crate foo; pub fn f() { foo::f() }",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "../bar" }
- "#)
- .file("foo/src/lib.rs", "extern crate bar; pub fn f() { bar::f() }")
- .file("bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ "extern crate bar; pub fn f() { bar::f() }",
+ )
+ .file(
+ "bar/Cargo.toml",
+ r#"
[project]
workspace = "../ws"
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("bar/src/lib.rs", "pub fn f() { }");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("ws")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("ws")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("ws/Cargo.lock"), existing_file());
assert_that(&p.root().join("ws/target"), existing_dir());
assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
assert_that(&p.root().join("bar/target"), is_not(existing_dir()));
- assert_that(p.cargo("build").cwd(p.root().join("foo")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/Cargo.lock"), existing_file());
assert_that(&p.root().join("foo/target"), existing_dir());
assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
#[test]
fn test_path_dependency_under_member() {
let p = project("foo")
- .file("ws/Cargo.toml", r#"
+ .file(
+ "ws/Cargo.toml",
+ r#"
[project]
name = "ws"
version = "0.1.0"
foo = { path = "../foo" }
[workspace]
- "#)
- .file("ws/src/lib.rs", r"extern crate foo; pub fn f() { foo::f() }")
- .file("foo/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "ws/src/lib.rs",
+ r"extern crate foo; pub fn f() { foo::f() }",
+ )
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
workspace = "../ws"
name = "foo"
[dependencies]
bar = { path = "./bar" }
- "#)
- .file("foo/src/lib.rs", "extern crate bar; pub fn f() { bar::f() }")
- .file("foo/bar/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "foo/src/lib.rs",
+ "extern crate bar; pub fn f() { bar::f() }",
+ )
+ .file(
+ "foo/bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/bar/src/lib.rs", "pub fn f() { }");
let p = p.build();
- assert_that(p.cargo("build").cwd(p.root().join("ws")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("ws")),
+ execs().with_status(0),
+ );
- assert_that(&p.root().join("foo/bar/Cargo.lock"), is_not(existing_file()));
+ assert_that(
+ &p.root().join("foo/bar/Cargo.lock"),
+ is_not(existing_file()),
+ );
assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir()));
- assert_that(p.cargo("build").cwd(p.root().join("foo/bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo/bar")),
+ execs().with_status(0),
+ );
- assert_that(&p.root().join("foo/bar/Cargo.lock"), is_not(existing_file()));
+ assert_that(
+ &p.root().join("foo/bar/Cargo.lock"),
+ is_not(existing_file()),
+ );
assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir()));
}
#[test]
fn excluded_simple() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "ws"
version = "0.1.0"
[workspace]
exclude = ["foo"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.root().join("target"), existing_dir());
- assert_that(p.cargo("build").cwd(p.root().join("foo")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/target"), existing_dir());
}
#[test]
fn exclude_members_preferred() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "ws"
version = "0.1.0"
[workspace]
members = ["foo/bar"]
exclude = ["foo"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("foo/bar/Cargo.toml", r#"
+ .file(
+ "foo/bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/bar/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.root().join("target"), existing_dir());
- assert_that(p.cargo("build").cwd(p.root().join("foo")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/target"), existing_dir());
- assert_that(p.cargo("build").cwd(p.root().join("foo/bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo/bar")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/bar/target"), is_not(existing_dir()));
}
#[test]
fn exclude_but_also_depend() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[project]
name = "ws"
version = "0.1.0"
[workspace]
exclude = ["foo"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("foo/Cargo.toml", r#"
+ .file(
+ "foo/Cargo.toml",
+ r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/src/lib.rs", "")
- .file("foo/bar/Cargo.toml", r#"
+ .file(
+ "foo/bar/Cargo.toml",
+ r#"
[project]
name = "bar"
version = "0.1.0"
authors = []
- "#)
+ "#,
+ )
.file("foo/bar/src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(0));
+ assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.root().join("target"), existing_dir());
- assert_that(p.cargo("build").cwd(p.root().join("foo")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/target"), existing_dir());
- assert_that(p.cargo("build").cwd(p.root().join("foo/bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("foo/bar")),
+ execs().with_status(0),
+ );
assert_that(&p.root().join("foo/bar/target"), existing_dir());
}
assert_that(&p.bin("bar"), is_not(existing_file()));
assert_that(&p.bin("baz"), is_not(existing_file()));
- assert_that(p.cargo("build").cwd(p.root().join("crates/bar")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("crates/bar")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
- assert_that(p.cargo("build").cwd(p.root().join("crates/baz")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("crates/baz")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("baz"), existing_file());
- assert_that(p.cargo("build").cwd(p.root().join("crates/qux")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").cwd(p.root().join("crates/qux")),
+ execs().with_status(0),
+ );
assert_that(&p.bin("qux"), is_not(existing_file()));
assert_that(&p.root().join("Cargo.lock"), existing_file());
- assert_that(&p.root().join("crates/bar/Cargo.lock"), is_not(existing_file()));
- assert_that(&p.root().join("crates/baz/Cargo.lock"), is_not(existing_file()));
+ assert_that(
+ &p.root().join("crates/bar/Cargo.lock"),
+ is_not(existing_file()),
+ );
+ assert_that(
+ &p.root().join("crates/baz/Cargo.lock"),
+ is_not(existing_file()),
+ );
assert_that(&p.root().join("crates/qux/Cargo.lock"), existing_file());
}
.file("crates/bar/src/main.rs", "fn main() {}");
let p = p.build();
- assert_that(p.cargo("build"),
- execs().with_status(101)
- .with_stderr("\
+ assert_that(
+ p.cargo("build"),
+ execs().with_status(101).with_stderr(
+ "\
error: failed to read `[..]Cargo.toml`
Caused by:
[..]
-"));
+",
+ ),
+ );
}
/// This is a freshness test for feature use with workspaces
#[test]
fn dep_used_with_separate_features() {
let p = project("foo")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[workspace]
members = ["feat_lib", "caller1", "caller2"]
- "#)
- .file("feat_lib/Cargo.toml", r#"
+ "#,
+ )
+ .file(
+ "feat_lib/Cargo.toml",
+ r#"
[project]
name = "feat_lib"
version = "0.1.0"
[features]
myfeature = []
- "#)
+ "#,
+ )
.file("feat_lib/src/lib.rs", "")
- .file("caller1/Cargo.toml", r#"
+ .file(
+ "caller1/Cargo.toml",
+ r#"
[project]
name = "caller1"
version = "0.1.0"
[dependencies]
feat_lib = { path = "../feat_lib" }
- "#)
+ "#,
+ )
.file("caller1/src/main.rs", "fn main() {}")
.file("caller1/src/lib.rs", "")
- .file("caller2/Cargo.toml", r#"
+ .file(
+ "caller2/Cargo.toml",
+ r#"
[project]
name = "caller2"
version = "0.1.0"
[dependencies]
feat_lib = { path = "../feat_lib", features = ["myfeature"] }
caller1 = { path = "../caller1" }
- "#)
+ "#,
+ )
.file("caller2/src/main.rs", "fn main() {}")
.file("caller2/src/lib.rs", "");
let p = p.build();
// Build the entire workspace
- assert_that(p.cargo("build").arg("--all"),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").arg("--all"),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling feat_lib v0.1.0 ([..])
[..]Compiling caller1 v0.1.0 ([..])
[..]Compiling caller2 v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
assert_that(&p.bin("caller1"), existing_file());
assert_that(&p.bin("caller2"), existing_file());
-
// Build caller1. should build the dep library. Because the features
// are different than the full workspace, it rebuilds.
// Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
// a single cargo build at the top level will be enough.
- assert_that(p.cargo("build").cwd(p.root().join("caller1")),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("caller1")),
+ execs().with_status(0).with_stderr(
+ "\
[..]Compiling feat_lib v0.1.0 ([..])
[..]Compiling caller1 v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
// Alternate building caller2/caller1 a few times, just to make sure
// features are being built separately. Should not rebuild anything
- assert_that(p.cargo("build").cwd(p.root().join("caller2")),
- execs().with_status(0)
- .with_stderr("\
+ assert_that(
+ p.cargo("build").cwd(p.root().join("caller2")),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build").cwd(p.root().join("caller1")),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("caller1")),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
- assert_that(p.cargo("build").cwd(p.root().join("caller2")),
- execs().with_status(0)
- .with_stderr("\
+",
+ ),
+ );
+ assert_that(
+ p.cargo("build").cwd(p.root().join("caller2")),
+ execs().with_status(0).with_stderr(
+ "\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-"));
+",
+ ),
+ );
}
#[test]
fn dont_recurse_out_of_cargo_home() {
let git_project = git::new("dep", |project| {
project
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "dep"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("build.rs", r#"
+ .file(
+ "build.rs",
+ r#"
use std::env;
use std::path::Path;
use std::process::{self, Command};
process::exit(1);
}
}
- "#)
+ "#,
+ )
}).unwrap();
let p = project("lib")
- .file("Cargo.toml", &format!(r#"
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
[package]
name = "lib"
version = "0.1.0"
git = "{}"
[workspace]
- "#, git_project.url()))
+ "#,
+ git_project.url()
+ ),
+ )
.file("src/lib.rs", "");
let p = p.build();
- assert_that(p.cargo("build").env("CARGO_HOME", p.root().join(".cargo")),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").env("CARGO_HOME", p.root().join(".cargo")),
+ execs().with_status(0),
+ );
}
/*FIXME: This fails because of how workspace.exclude and workspace.members are working.
#[test]
fn cargo_home_at_root_works() {
let p = project("lib")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "lib"
version = "0.1.0"
[workspace]
members = ["a"]
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
- .file("a/Cargo.toml", r#"
+ .file(
+ "a/Cargo.toml",
+ r#"
[package]
name = "a"
version = "0.1.0"
- "#)
+ "#,
+ )
.file("a/src/lib.rs", "");
let p = p.build();
assert_that(p.cargo("build"), execs().with_status(0));
- assert_that(p.cargo("build").arg("--frozen").env("CARGO_HOME", p.root()),
- execs().with_status(0));
+ assert_that(
+ p.cargo("build").arg("--frozen").env("CARGO_HOME", p.root()),
+ execs().with_status(0),
+ );
}
#[test]
fn relative_rustc() {
let p = project("the_exe")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "foo"
version = "0.1.0"
- "#)
- .file("src/main.rs", r#"
+ "#,
+ )
+ .file(
+ "src/main.rs",
+ r#"
use std::process::Command;
use std::env;
}
std::process::exit(cmd.status().unwrap().code().unwrap());
}
- "#)
+ "#,
+ )
.build();
assert_that(p.cargo("build"), execs().with_status(0));
Package::new("a", "0.1.0").publish();
let p = project("lib")
- .file("Cargo.toml", r#"
+ .file(
+ "Cargo.toml",
+ r#"
[package]
name = "lib"
version = "0.1.0"
[dependencies]
a = "0.1"
- "#)
+ "#,
+ )
.file("src/lib.rs", "")
.build();
fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap();
let file = format!("./foo{}", env::consts::EXE_SUFFIX);
- assert_that(p.cargo("build").env("RUSTC", &file),
- execs().with_status(0));
+ assert_that(p.cargo("build").env("RUSTC", &file), execs().with_status(0));
}